/* The Blackfin code generation auxiliary output file.
   Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
   Contributed by Analog Devices.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "input.h"
#include "target.h"
#include "target-def.h"
#include "expr.h"
#include "toplev.h"
#include "recog.h"
#include "optabs.h"
#include "ggc.h"
#include "integrate.h"
#include "cgraph.h"
#include "langhooks.h"
#include "bfin-protos.h"
#include "tm-preds.h"
#include "gt-bfin.h"
#include "basic-block.h"

/* A C structure for machine-specific, per-function data.
|
/* A C structure for machine-specific, per-function data.
|
This is added to the cfun structure. */
|
This is added to the cfun structure. */
|
struct machine_function GTY(())
|
struct machine_function GTY(())
|
{
|
{
|
int has_hardware_loops;
|
int has_hardware_loops;
|
};
|
};
|
|
|
/* Test and compare insns in bfin.md store the information needed to
|
/* Test and compare insns in bfin.md store the information needed to
|
generate branch and scc insns here. */
|
generate branch and scc insns here. */
|
rtx bfin_compare_op0, bfin_compare_op1;
|
rtx bfin_compare_op0, bfin_compare_op1;
|
|
|
/* RTX for condition code flag register and RETS register */
|
/* RTX for condition code flag register and RETS register */
|
extern GTY(()) rtx bfin_cc_rtx;
|
extern GTY(()) rtx bfin_cc_rtx;
|
extern GTY(()) rtx bfin_rets_rtx;
|
extern GTY(()) rtx bfin_rets_rtx;
|
rtx bfin_cc_rtx, bfin_rets_rtx;
|
rtx bfin_cc_rtx, bfin_rets_rtx;
|
|
|
int max_arg_registers = 0;
|
int max_arg_registers = 0;
|
|
|
/* Arrays used when emitting register names. */
|
/* Arrays used when emitting register names. */
|
const char *short_reg_names[] = SHORT_REGISTER_NAMES;
|
const char *short_reg_names[] = SHORT_REGISTER_NAMES;
|
const char *high_reg_names[] = HIGH_REGISTER_NAMES;
|
const char *high_reg_names[] = HIGH_REGISTER_NAMES;
|
const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
|
const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
|
const char *byte_reg_names[] = BYTE_REGISTER_NAMES;
|
const char *byte_reg_names[] = BYTE_REGISTER_NAMES;
|
|
|
static int arg_regs[] = FUNCTION_ARG_REGISTERS;
|
static int arg_regs[] = FUNCTION_ARG_REGISTERS;
|
|
|
/* Nonzero if -mshared-library-id was given. */
|
/* Nonzero if -mshared-library-id was given. */
|
static int bfin_lib_id_given;
|
static int bfin_lib_id_given;
|
|
|
/* Output assembly to make the symbol NAME global, using the Blackfin
   ".global NAME;" directive syntax.  */

static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  /* Blackfin assembler statements are terminated with a semicolon.  */
  fputc (';', stream);
  fputc ('\n', stream);
}

static void
|
static void
|
output_file_start (void)
|
output_file_start (void)
|
{
|
{
|
FILE *file = asm_out_file;
|
FILE *file = asm_out_file;
|
int i;
|
int i;
|
|
|
fprintf (file, ".file \"%s\";\n", input_filename);
|
fprintf (file, ".file \"%s\";\n", input_filename);
|
|
|
for (i = 0; arg_regs[i] >= 0; i++)
|
for (i = 0; arg_regs[i] >= 0; i++)
|
;
|
;
|
max_arg_registers = i; /* how many arg reg used */
|
max_arg_registers = i; /* how many arg reg used */
|
}
|
}
|
|
|
/* Called early in the compilation to conditionally modify
|
/* Called early in the compilation to conditionally modify
|
fixed_regs/call_used_regs. */
|
fixed_regs/call_used_regs. */
|
|
|
void
|
void
|
conditional_register_usage (void)
|
conditional_register_usage (void)
|
{
|
{
|
/* initialize condition code flag register rtx */
|
/* initialize condition code flag register rtx */
|
bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
|
bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
|
bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
|
bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
|
}
|
}
|
|
|
/* Examine machine-dependent attributes of function type FUNTYPE and return its
|
/* Examine machine-dependent attributes of function type FUNTYPE and return its
|
type. See the definition of E_FUNKIND. */
|
type. See the definition of E_FUNKIND. */
|
|
|
static e_funkind funkind (tree funtype)
|
static e_funkind funkind (tree funtype)
|
{
|
{
|
tree attrs = TYPE_ATTRIBUTES (funtype);
|
tree attrs = TYPE_ATTRIBUTES (funtype);
|
if (lookup_attribute ("interrupt_handler", attrs))
|
if (lookup_attribute ("interrupt_handler", attrs))
|
return INTERRUPT_HANDLER;
|
return INTERRUPT_HANDLER;
|
else if (lookup_attribute ("exception_handler", attrs))
|
else if (lookup_attribute ("exception_handler", attrs))
|
return EXCPT_HANDLER;
|
return EXCPT_HANDLER;
|
else if (lookup_attribute ("nmi_handler", attrs))
|
else if (lookup_attribute ("nmi_handler", attrs))
|
return NMI_HANDLER;
|
return NMI_HANDLER;
|
else
|
else
|
return SUBROUTINE;
|
return SUBROUTINE;
|
}
|
}
|
|
|
/* Legitimize PIC addresses. If the address is already position-independent,
|
/* Legitimize PIC addresses. If the address is already position-independent,
|
we return ORIG. Newly generated position-independent addresses go into a
|
we return ORIG. Newly generated position-independent addresses go into a
|
reg. This is REG if nonzero, otherwise we allocate register(s) as
|
reg. This is REG if nonzero, otherwise we allocate register(s) as
|
necessary. PICREG is the register holding the pointer to the PIC offset
|
necessary. PICREG is the register holding the pointer to the PIC offset
|
table. */
|
table. */
|
|
|
static rtx
|
static rtx
|
legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
|
legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
|
{
|
{
|
rtx addr = orig;
|
rtx addr = orig;
|
rtx new = orig;
|
rtx new = orig;
|
|
|
if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
|
if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
|
{
|
{
|
if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
|
if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
|
reg = new = orig;
|
reg = new = orig;
|
else
|
else
|
{
|
{
|
int unspec;
|
int unspec;
|
rtx tmp;
|
rtx tmp;
|
|
|
if (TARGET_ID_SHARED_LIBRARY)
|
if (TARGET_ID_SHARED_LIBRARY)
|
unspec = UNSPEC_MOVE_PIC;
|
unspec = UNSPEC_MOVE_PIC;
|
else if (GET_CODE (addr) == SYMBOL_REF
|
else if (GET_CODE (addr) == SYMBOL_REF
|
&& SYMBOL_REF_FUNCTION_P (addr))
|
&& SYMBOL_REF_FUNCTION_P (addr))
|
{
|
{
|
unspec = UNSPEC_FUNCDESC_GOT17M4;
|
unspec = UNSPEC_FUNCDESC_GOT17M4;
|
}
|
}
|
else
|
else
|
{
|
{
|
unspec = UNSPEC_MOVE_FDPIC;
|
unspec = UNSPEC_MOVE_FDPIC;
|
}
|
}
|
|
|
if (reg == 0)
|
if (reg == 0)
|
{
|
{
|
gcc_assert (!no_new_pseudos);
|
gcc_assert (!no_new_pseudos);
|
reg = gen_reg_rtx (Pmode);
|
reg = gen_reg_rtx (Pmode);
|
}
|
}
|
|
|
tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
|
tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
|
new = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));
|
new = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));
|
|
|
emit_move_insn (reg, new);
|
emit_move_insn (reg, new);
|
}
|
}
|
if (picreg == pic_offset_table_rtx)
|
if (picreg == pic_offset_table_rtx)
|
current_function_uses_pic_offset_table = 1;
|
current_function_uses_pic_offset_table = 1;
|
return reg;
|
return reg;
|
}
|
}
|
|
|
else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
|
else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
|
{
|
{
|
rtx base;
|
rtx base;
|
|
|
if (GET_CODE (addr) == CONST)
|
if (GET_CODE (addr) == CONST)
|
{
|
{
|
addr = XEXP (addr, 0);
|
addr = XEXP (addr, 0);
|
gcc_assert (GET_CODE (addr) == PLUS);
|
gcc_assert (GET_CODE (addr) == PLUS);
|
}
|
}
|
|
|
if (XEXP (addr, 0) == picreg)
|
if (XEXP (addr, 0) == picreg)
|
return orig;
|
return orig;
|
|
|
if (reg == 0)
|
if (reg == 0)
|
{
|
{
|
gcc_assert (!no_new_pseudos);
|
gcc_assert (!no_new_pseudos);
|
reg = gen_reg_rtx (Pmode);
|
reg = gen_reg_rtx (Pmode);
|
}
|
}
|
|
|
base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
|
base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
|
addr = legitimize_pic_address (XEXP (addr, 1),
|
addr = legitimize_pic_address (XEXP (addr, 1),
|
base == reg ? NULL_RTX : reg,
|
base == reg ? NULL_RTX : reg,
|
picreg);
|
picreg);
|
|
|
if (GET_CODE (addr) == CONST_INT)
|
if (GET_CODE (addr) == CONST_INT)
|
{
|
{
|
gcc_assert (! reload_in_progress && ! reload_completed);
|
gcc_assert (! reload_in_progress && ! reload_completed);
|
addr = force_reg (Pmode, addr);
|
addr = force_reg (Pmode, addr);
|
}
|
}
|
|
|
if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
|
if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
|
{
|
{
|
base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
|
base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
|
addr = XEXP (addr, 1);
|
addr = XEXP (addr, 1);
|
}
|
}
|
|
|
return gen_rtx_PLUS (Pmode, base, addr);
|
return gen_rtx_PLUS (Pmode, base, addr);
|
}
|
}
|
|
|
return new;
|
return new;
|
}
|
}
|
|
|
/* Stack frame layout. */
|
/* Stack frame layout. */
|
|
|
/* Compute the number of DREGS to save with a push_multiple operation.
|
/* Compute the number of DREGS to save with a push_multiple operation.
|
This could include registers that aren't modified in the function,
|
This could include registers that aren't modified in the function,
|
since push_multiple only takes a range of registers.
|
since push_multiple only takes a range of registers.
|
If IS_INTHANDLER, then everything that is live must be saved, even
|
If IS_INTHANDLER, then everything that is live must be saved, even
|
if normally call-clobbered. */
|
if normally call-clobbered. */
|
|
|
static int
|
static int
|
n_dregs_to_save (bool is_inthandler)
|
n_dregs_to_save (bool is_inthandler)
|
{
|
{
|
unsigned i;
|
unsigned i;
|
|
|
for (i = REG_R0; i <= REG_R7; i++)
|
for (i = REG_R0; i <= REG_R7; i++)
|
{
|
{
|
if (regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
|
if (regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
|
return REG_R7 - i + 1;
|
return REG_R7 - i + 1;
|
|
|
if (current_function_calls_eh_return)
|
if (current_function_calls_eh_return)
|
{
|
{
|
unsigned j;
|
unsigned j;
|
for (j = 0; ; j++)
|
for (j = 0; ; j++)
|
{
|
{
|
unsigned test = EH_RETURN_DATA_REGNO (j);
|
unsigned test = EH_RETURN_DATA_REGNO (j);
|
if (test == INVALID_REGNUM)
|
if (test == INVALID_REGNUM)
|
break;
|
break;
|
if (test == i)
|
if (test == i)
|
return REG_R7 - i + 1;
|
return REG_R7 - i + 1;
|
}
|
}
|
}
|
}
|
|
|
}
|
}
|
return 0;
|
return 0;
|
}
|
}
|
|
|
/* Like n_dregs_to_save, but compute number of PREGS to save. */
|
/* Like n_dregs_to_save, but compute number of PREGS to save. */
|
|
|
static int
|
static int
|
n_pregs_to_save (bool is_inthandler)
|
n_pregs_to_save (bool is_inthandler)
|
{
|
{
|
unsigned i;
|
unsigned i;
|
|
|
for (i = REG_P0; i <= REG_P5; i++)
|
for (i = REG_P0; i <= REG_P5; i++)
|
if ((regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
|
if ((regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
|
|| (!TARGET_FDPIC
|
|| (!TARGET_FDPIC
|
&& i == PIC_OFFSET_TABLE_REGNUM
|
&& i == PIC_OFFSET_TABLE_REGNUM
|
&& (current_function_uses_pic_offset_table
|
&& (current_function_uses_pic_offset_table
|
|| (TARGET_ID_SHARED_LIBRARY && ! current_function_is_leaf))))
|
|| (TARGET_ID_SHARED_LIBRARY && ! current_function_is_leaf))))
|
return REG_P5 - i + 1;
|
return REG_P5 - i + 1;
|
return 0;
|
return 0;
|
}
|
}
|
|
|
/* Determine if we are going to save the frame pointer in the prologue. */
|
/* Determine if we are going to save the frame pointer in the prologue. */
|
|
|
static bool
|
static bool
|
must_save_fp_p (void)
|
must_save_fp_p (void)
|
{
|
{
|
return frame_pointer_needed || regs_ever_live[REG_FP];
|
return frame_pointer_needed || regs_ever_live[REG_FP];
|
}
|
}
|
|
|
static bool
|
static bool
|
stack_frame_needed_p (void)
|
stack_frame_needed_p (void)
|
{
|
{
|
/* EH return puts a new return address into the frame using an
|
/* EH return puts a new return address into the frame using an
|
address relative to the frame pointer. */
|
address relative to the frame pointer. */
|
if (current_function_calls_eh_return)
|
if (current_function_calls_eh_return)
|
return true;
|
return true;
|
return frame_pointer_needed;
|
return frame_pointer_needed;
|
}
|
}
|
|
|
/* Emit code to save registers in the prologue. SAVEALL is nonzero if we
|
/* Emit code to save registers in the prologue. SAVEALL is nonzero if we
|
must save all registers; this is used for interrupt handlers.
|
must save all registers; this is used for interrupt handlers.
|
SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
|
SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
|
this for an interrupt (or exception) handler. */
|
this for an interrupt (or exception) handler. */
|
|
|
static void
|
static void
|
expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
|
expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
|
{
|
{
|
int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
|
int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
|
int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
|
int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
|
int dregno = REG_R7 + 1 - ndregs;
|
int dregno = REG_R7 + 1 - ndregs;
|
int pregno = REG_P5 + 1 - npregs;
|
int pregno = REG_P5 + 1 - npregs;
|
int total = ndregs + npregs;
|
int total = ndregs + npregs;
|
int i;
|
int i;
|
rtx pat, insn, val;
|
rtx pat, insn, val;
|
|
|
if (total == 0)
|
if (total == 0)
|
return;
|
return;
|
|
|
val = GEN_INT (-total * 4);
|
val = GEN_INT (-total * 4);
|
pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 2));
|
pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 2));
|
XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
|
XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
|
UNSPEC_PUSH_MULTIPLE);
|
UNSPEC_PUSH_MULTIPLE);
|
XVECEXP (pat, 0, total + 1) = gen_rtx_SET (VOIDmode, spreg,
|
XVECEXP (pat, 0, total + 1) = gen_rtx_SET (VOIDmode, spreg,
|
gen_rtx_PLUS (Pmode, spreg,
|
gen_rtx_PLUS (Pmode, spreg,
|
val));
|
val));
|
RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total + 1)) = 1;
|
RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total + 1)) = 1;
|
for (i = 0; i < total; i++)
|
for (i = 0; i < total; i++)
|
{
|
{
|
rtx memref = gen_rtx_MEM (word_mode,
|
rtx memref = gen_rtx_MEM (word_mode,
|
gen_rtx_PLUS (Pmode, spreg,
|
gen_rtx_PLUS (Pmode, spreg,
|
GEN_INT (- i * 4 - 4)));
|
GEN_INT (- i * 4 - 4)));
|
rtx subpat;
|
rtx subpat;
|
if (ndregs > 0)
|
if (ndregs > 0)
|
{
|
{
|
subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
|
subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
|
dregno++));
|
dregno++));
|
ndregs--;
|
ndregs--;
|
}
|
}
|
else
|
else
|
{
|
{
|
subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
|
subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
|
pregno++));
|
pregno++));
|
npregs++;
|
npregs++;
|
}
|
}
|
XVECEXP (pat, 0, i + 1) = subpat;
|
XVECEXP (pat, 0, i + 1) = subpat;
|
RTX_FRAME_RELATED_P (subpat) = 1;
|
RTX_FRAME_RELATED_P (subpat) = 1;
|
}
|
}
|
insn = emit_insn (pat);
|
insn = emit_insn (pat);
|
RTX_FRAME_RELATED_P (insn) = 1;
|
RTX_FRAME_RELATED_P (insn) = 1;
|
}
|
}
|
|
|
/* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
|
/* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
|
must save all registers; this is used for interrupt handlers.
|
must save all registers; this is used for interrupt handlers.
|
SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
|
SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
|
this for an interrupt (or exception) handler. */
|
this for an interrupt (or exception) handler. */
|
|
|
static void
|
static void
|
expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
|
expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
|
{
|
{
|
int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
|
int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
|
int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
|
int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
|
int total = ndregs + npregs;
|
int total = ndregs + npregs;
|
int i, regno;
|
int i, regno;
|
rtx pat, insn;
|
rtx pat, insn;
|
|
|
if (total == 0)
|
if (total == 0)
|
return;
|
return;
|
|
|
pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 1));
|
pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 1));
|
XVECEXP (pat, 0, 0) = gen_rtx_SET (VOIDmode, spreg,
|
XVECEXP (pat, 0, 0) = gen_rtx_SET (VOIDmode, spreg,
|
gen_rtx_PLUS (Pmode, spreg,
|
gen_rtx_PLUS (Pmode, spreg,
|
GEN_INT (total * 4)));
|
GEN_INT (total * 4)));
|
|
|
if (npregs > 0)
|
if (npregs > 0)
|
regno = REG_P5 + 1;
|
regno = REG_P5 + 1;
|
else
|
else
|
regno = REG_R7 + 1;
|
regno = REG_R7 + 1;
|
|
|
for (i = 0; i < total; i++)
|
for (i = 0; i < total; i++)
|
{
|
{
|
rtx addr = (i > 0
|
rtx addr = (i > 0
|
? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
|
? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
|
: spreg);
|
: spreg);
|
rtx memref = gen_rtx_MEM (word_mode, addr);
|
rtx memref = gen_rtx_MEM (word_mode, addr);
|
|
|
regno--;
|
regno--;
|
XVECEXP (pat, 0, i + 1)
|
XVECEXP (pat, 0, i + 1)
|
= gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);
|
= gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);
|
|
|
if (npregs > 0)
|
if (npregs > 0)
|
{
|
{
|
if (--npregs == 0)
|
if (--npregs == 0)
|
regno = REG_R7 + 1;
|
regno = REG_R7 + 1;
|
}
|
}
|
}
|
}
|
|
|
insn = emit_insn (pat);
|
insn = emit_insn (pat);
|
RTX_FRAME_RELATED_P (insn) = 1;
|
RTX_FRAME_RELATED_P (insn) = 1;
|
}
|
}
|
|
|
/* Perform any needed actions needed for a function that is receiving a
|
/* Perform any needed actions needed for a function that is receiving a
|
variable number of arguments.
|
variable number of arguments.
|
|
|
CUM is as above.
|
CUM is as above.
|
|
|
MODE and TYPE are the mode and type of the current parameter.
|
MODE and TYPE are the mode and type of the current parameter.
|
|
|
PRETEND_SIZE is a variable that should be set to the amount of stack
|
PRETEND_SIZE is a variable that should be set to the amount of stack
|
that must be pushed by the prolog to pretend that our caller pushed
|
that must be pushed by the prolog to pretend that our caller pushed
|
it.
|
it.
|
|
|
Normally, this macro will push all remaining incoming registers on the
|
Normally, this macro will push all remaining incoming registers on the
|
stack and set PRETEND_SIZE to the length of the registers pushed.
|
stack and set PRETEND_SIZE to the length of the registers pushed.
|
|
|
Blackfin specific :
|
Blackfin specific :
|
- VDSP C compiler manual (our ABI) says that a variable args function
|
- VDSP C compiler manual (our ABI) says that a variable args function
|
should save the R0, R1 and R2 registers in the stack.
|
should save the R0, R1 and R2 registers in the stack.
|
- The caller will always leave space on the stack for the
|
- The caller will always leave space on the stack for the
|
arguments that are passed in registers, so we dont have
|
arguments that are passed in registers, so we dont have
|
to leave any extra space.
|
to leave any extra space.
|
- now, the vastart pointer can access all arguments from the stack. */
|
- now, the vastart pointer can access all arguments from the stack. */
|
|
|
static void
|
static void
|
setup_incoming_varargs (CUMULATIVE_ARGS *cum,
|
setup_incoming_varargs (CUMULATIVE_ARGS *cum,
|
enum machine_mode mode ATTRIBUTE_UNUSED,
|
enum machine_mode mode ATTRIBUTE_UNUSED,
|
tree type ATTRIBUTE_UNUSED, int *pretend_size,
|
tree type ATTRIBUTE_UNUSED, int *pretend_size,
|
int no_rtl)
|
int no_rtl)
|
{
|
{
|
rtx mem;
|
rtx mem;
|
int i;
|
int i;
|
|
|
if (no_rtl)
|
if (no_rtl)
|
return;
|
return;
|
|
|
/* The move for named arguments will be generated automatically by the
|
/* The move for named arguments will be generated automatically by the
|
compiler. We need to generate the move rtx for the unnamed arguments
|
compiler. We need to generate the move rtx for the unnamed arguments
|
if they are in the first 3 words. We assume at least 1 named argument
|
if they are in the first 3 words. We assume at least 1 named argument
|
exists, so we never generate [ARGP] = R0 here. */
|
exists, so we never generate [ARGP] = R0 here. */
|
|
|
for (i = cum->words + 1; i < max_arg_registers; i++)
|
for (i = cum->words + 1; i < max_arg_registers; i++)
|
{
|
{
|
mem = gen_rtx_MEM (Pmode,
|
mem = gen_rtx_MEM (Pmode,
|
plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
|
plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
|
emit_move_insn (mem, gen_rtx_REG (Pmode, i));
|
emit_move_insn (mem, gen_rtx_REG (Pmode, i));
|
}
|
}
|
|
|
*pretend_size = 0;
|
*pretend_size = 0;
|
}
|
}
|
|
|
/* Value should be nonzero if functions must have frame pointers.
|
/* Value should be nonzero if functions must have frame pointers.
|
Zero means the frame pointer need not be set up (and parms may
|
Zero means the frame pointer need not be set up (and parms may
|
be accessed via the stack pointer) in functions that seem suitable. */
|
be accessed via the stack pointer) in functions that seem suitable. */
|
|
|
int
|
int
|
bfin_frame_pointer_required (void)
|
bfin_frame_pointer_required (void)
|
{
|
{
|
e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
|
e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
|
|
|
if (fkind != SUBROUTINE)
|
if (fkind != SUBROUTINE)
|
return 1;
|
return 1;
|
|
|
/* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
|
/* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
|
so we have to override it for non-leaf functions. */
|
so we have to override it for non-leaf functions. */
|
if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
|
if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
|
return 1;
|
return 1;
|
|
|
return 0;
|
return 0;
|
}
|
}
|
|
|
/* Return the number of registers pushed during the prologue. */
|
/* Return the number of registers pushed during the prologue. */
|
|
|
static int
|
static int
|
n_regs_saved_by_prologue (void)
|
n_regs_saved_by_prologue (void)
|
{
|
{
|
e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
|
e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
|
bool is_inthandler = fkind != SUBROUTINE;
|
bool is_inthandler = fkind != SUBROUTINE;
|
tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
|
tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
|
bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
|
bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
|
|| (is_inthandler && !current_function_is_leaf));
|
|| (is_inthandler && !current_function_is_leaf));
|
int ndregs = all ? 8 : n_dregs_to_save (is_inthandler);
|
int ndregs = all ? 8 : n_dregs_to_save (is_inthandler);
|
int npregs = all ? 6 : n_pregs_to_save (is_inthandler);
|
int npregs = all ? 6 : n_pregs_to_save (is_inthandler);
|
int n = ndregs + npregs;
|
int n = ndregs + npregs;
|
|
|
if (all || stack_frame_needed_p ())
|
if (all || stack_frame_needed_p ())
|
/* We use a LINK instruction in this case. */
|
/* We use a LINK instruction in this case. */
|
n += 2;
|
n += 2;
|
else
|
else
|
{
|
{
|
if (must_save_fp_p ())
|
if (must_save_fp_p ())
|
n++;
|
n++;
|
if (! current_function_is_leaf)
|
if (! current_function_is_leaf)
|
n++;
|
n++;
|
}
|
}
|
|
|
if (fkind != SUBROUTINE)
|
if (fkind != SUBROUTINE)
|
{
|
{
|
int i;
|
int i;
|
|
|
/* Increment once for ASTAT. */
|
/* Increment once for ASTAT. */
|
n++;
|
n++;
|
|
|
/* RETE/X/N. */
|
/* RETE/X/N. */
|
if (lookup_attribute ("nesting", attrs))
|
if (lookup_attribute ("nesting", attrs))
|
n++;
|
n++;
|
|
|
for (i = REG_P7 + 1; i < REG_CC; i++)
|
for (i = REG_P7 + 1; i < REG_CC; i++)
|
if (all
|
if (all
|
|| regs_ever_live[i]
|
|| regs_ever_live[i]
|
|| (!leaf_function_p () && call_used_regs[i]))
|
|| (!leaf_function_p () && call_used_regs[i]))
|
n += i == REG_A0 || i == REG_A1 ? 2 : 1;
|
n += i == REG_A0 || i == REG_A1 ? 2 : 1;
|
}
|
}
|
return n;
|
return n;
|
}
|
}
|
|
|
/* Return the offset between two registers, one to be eliminated, and the other
|
/* Return the offset between two registers, one to be eliminated, and the other
|
its replacement, at the start of a routine. */
|
its replacement, at the start of a routine. */
|
|
|
HOST_WIDE_INT
|
HOST_WIDE_INT
|
bfin_initial_elimination_offset (int from, int to)
|
bfin_initial_elimination_offset (int from, int to)
|
{
|
{
|
HOST_WIDE_INT offset = 0;
|
HOST_WIDE_INT offset = 0;
|
|
|
if (from == ARG_POINTER_REGNUM)
|
if (from == ARG_POINTER_REGNUM)
|
offset = n_regs_saved_by_prologue () * 4;
|
offset = n_regs_saved_by_prologue () * 4;
|
|
|
if (to == STACK_POINTER_REGNUM)
|
if (to == STACK_POINTER_REGNUM)
|
{
|
{
|
if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
|
if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
|
offset += current_function_outgoing_args_size;
|
offset += current_function_outgoing_args_size;
|
else if (current_function_outgoing_args_size)
|
else if (current_function_outgoing_args_size)
|
offset += FIXED_STACK_AREA;
|
offset += FIXED_STACK_AREA;
|
|
|
offset += get_frame_size ();
|
offset += get_frame_size ();
|
}
|
}
|
|
|
return offset;
|
return offset;
|
}
|
}
|
|
|
/* Emit code to load a constant CONSTANT into register REG; setting
|
/* Emit code to load a constant CONSTANT into register REG; setting
|
RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
|
RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
|
Make sure that the insns we generate need not be split. */
|
Make sure that the insns we generate need not be split. */
|
|
|
static void
|
static void
|
frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
|
frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
|
{
|
{
|
rtx insn;
|
rtx insn;
|
rtx cst = GEN_INT (constant);
|
rtx cst = GEN_INT (constant);
|
|
|
if (constant >= -32768 && constant < 65536)
|
if (constant >= -32768 && constant < 65536)
|
insn = emit_move_insn (reg, cst);
|
insn = emit_move_insn (reg, cst);
|
else
|
else
|
{
|
{
|
/* We don't call split_load_immediate here, since dwarf2out.c can get
|
/* We don't call split_load_immediate here, since dwarf2out.c can get
|
confused about some of the more clever sequences it can generate. */
|
confused about some of the more clever sequences it can generate. */
|
insn = emit_insn (gen_movsi_high (reg, cst));
|
insn = emit_insn (gen_movsi_high (reg, cst));
|
if (related)
|
if (related)
|
RTX_FRAME_RELATED_P (insn) = 1;
|
RTX_FRAME_RELATED_P (insn) = 1;
|
insn = emit_insn (gen_movsi_low (reg, reg, cst));
|
insn = emit_insn (gen_movsi_low (reg, reg, cst));
|
}
|
}
|
if (related)
|
if (related)
|
RTX_FRAME_RELATED_P (insn) = 1;
|
RTX_FRAME_RELATED_P (insn) = 1;
|
}
|
}
|
|
|
/* Generate efficient code to add a value to the frame pointer. We
|
/* Generate efficient code to add a value to the frame pointer. We
|
can use P1 as a scratch register. Set RTX_FRAME_RELATED_P on the
|
can use P1 as a scratch register. Set RTX_FRAME_RELATED_P on the
|
generated insns if FRAME is nonzero. */
|
generated insns if FRAME is nonzero. */
|
|
|
static void
|
static void
|
add_to_sp (rtx spreg, HOST_WIDE_INT value, int frame)
|
add_to_sp (rtx spreg, HOST_WIDE_INT value, int frame)
|
{
|
{
|
if (value == 0)
|
if (value == 0)
|
return;
|
return;
|
|
|
/* Choose whether to use a sequence using a temporary register, or
|
/* Choose whether to use a sequence using a temporary register, or
|
a sequence with multiple adds. We can add a signed 7 bit value
|
a sequence with multiple adds. We can add a signed 7 bit value
|
in one instruction. */
|
in one instruction. */
|
if (value > 120 || value < -120)
|
if (value > 120 || value < -120)
|
{
|
{
|
rtx tmpreg = gen_rtx_REG (SImode, REG_P1);
|
rtx tmpreg = gen_rtx_REG (SImode, REG_P1);
|
rtx insn;
|
rtx insn;
|
|
|
if (frame)
|
if (frame)
|
frame_related_constant_load (tmpreg, value, TRUE);
|
frame_related_constant_load (tmpreg, value, TRUE);
|
else
|
else
|
{
|
{
|
insn = emit_move_insn (tmpreg, GEN_INT (value));
|
insn = emit_move_insn (tmpreg, GEN_INT (value));
|
if (frame)
|
if (frame)
|
RTX_FRAME_RELATED_P (insn) = 1;
|
RTX_FRAME_RELATED_P (insn) = 1;
|
}
|
}
|
|
|
insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
|
insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
|
if (frame)
|
if (frame)
|
RTX_FRAME_RELATED_P (insn) = 1;
|
RTX_FRAME_RELATED_P (insn) = 1;
|
}
|
}
|
else
|
else
|
do
|
do
|
{
|
{
|
int size = value;
|
int size = value;
|
rtx insn;
|
rtx insn;
|
|
|
if (size > 60)
|
if (size > 60)
|
size = 60;
|
size = 60;
|
else if (size < -60)
|
else if (size < -60)
|
/* We could use -62, but that would leave the stack unaligned, so
|
/* We could use -62, but that would leave the stack unaligned, so
|
it's no good. */
|
it's no good. */
|
size = -60;
|
size = -60;
|
|
|
insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (size)));
|
insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (size)));
|
if (frame)
|
if (frame)
|
RTX_FRAME_RELATED_P (insn) = 1;
|
RTX_FRAME_RELATED_P (insn) = 1;
|
value -= size;
|
value -= size;
|
}
|
}
|
while (value != 0);
|
while (value != 0);
|
}
|
}
|
|
|
/* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
|
/* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
|
is too large, generate a sequence of insns that has the same effect.
|
is too large, generate a sequence of insns that has the same effect.
|
SPREG contains (reg:SI REG_SP). */
|
SPREG contains (reg:SI REG_SP). */
|
|
|
static void
|
static void
|
emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
|
emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
|
{
|
{
|
HOST_WIDE_INT link_size = frame_size;
|
HOST_WIDE_INT link_size = frame_size;
|
rtx insn;
|
rtx insn;
|
int i;
|
int i;
|
|
|
if (link_size > 262140)
|
if (link_size > 262140)
|
link_size = 262140;
|
link_size = 262140;
|
|
|
/* Use a LINK insn with as big a constant as possible, then subtract
|
/* Use a LINK insn with as big a constant as possible, then subtract
|
any remaining size from the SP. */
|
any remaining size from the SP. */
|
insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
|
insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
|
RTX_FRAME_RELATED_P (insn) = 1;
|
RTX_FRAME_RELATED_P (insn) = 1;
|
|
|
for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
|
for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
|
{
|
{
|
rtx set = XVECEXP (PATTERN (insn), 0, i);
|
rtx set = XVECEXP (PATTERN (insn), 0, i);
|
gcc_assert (GET_CODE (set) == SET);
|
gcc_assert (GET_CODE (set) == SET);
|
RTX_FRAME_RELATED_P (set) = 1;
|
RTX_FRAME_RELATED_P (set) = 1;
|
}
|
}
|
|
|
frame_size -= link_size;
|
frame_size -= link_size;
|
|
|
if (frame_size > 0)
|
if (frame_size > 0)
|
{
|
{
|
/* Must use a call-clobbered PREG that isn't the static chain. */
|
/* Must use a call-clobbered PREG that isn't the static chain. */
|
rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);
|
rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);
|
|
|
frame_related_constant_load (tmpreg, -frame_size, TRUE);
|
frame_related_constant_load (tmpreg, -frame_size, TRUE);
|
insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
|
insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
|
RTX_FRAME_RELATED_P (insn) = 1;
|
RTX_FRAME_RELATED_P (insn) = 1;
|
}
|
}
|
}
|
}
|
|
|
/* Return the number of bytes we must reserve for outgoing arguments
|
/* Return the number of bytes we must reserve for outgoing arguments
|
in the current function's stack frame. */
|
in the current function's stack frame. */
|
|
|
static HOST_WIDE_INT
|
static HOST_WIDE_INT
|
arg_area_size (void)
|
arg_area_size (void)
|
{
|
{
|
if (current_function_outgoing_args_size)
|
if (current_function_outgoing_args_size)
|
{
|
{
|
if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
|
if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
|
return current_function_outgoing_args_size;
|
return current_function_outgoing_args_size;
|
else
|
else
|
return FIXED_STACK_AREA;
|
return FIXED_STACK_AREA;
|
}
|
}
|
return 0;
|
return 0;
|
}
|
}
|
|
|
/* Save RETS and FP, and allocate a stack frame. ALL is true if the
|
/* Save RETS and FP, and allocate a stack frame. ALL is true if the
|
function must save all its registers (true only for certain interrupt
|
function must save all its registers (true only for certain interrupt
|
handlers). */
|
handlers). */
|
|
|
static void
|
static void
|
do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
|
do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
|
{
|
{
|
frame_size += arg_area_size ();
|
frame_size += arg_area_size ();
|
|
|
if (all || stack_frame_needed_p ()
|
if (all || stack_frame_needed_p ()
|
|| (must_save_fp_p () && ! current_function_is_leaf))
|
|| (must_save_fp_p () && ! current_function_is_leaf))
|
emit_link_insn (spreg, frame_size);
|
emit_link_insn (spreg, frame_size);
|
else
|
else
|
{
|
{
|
if (! current_function_is_leaf)
|
if (! current_function_is_leaf)
|
{
|
{
|
rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
|
rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
|
gen_rtx_PRE_DEC (Pmode, spreg)),
|
gen_rtx_PRE_DEC (Pmode, spreg)),
|
bfin_rets_rtx);
|
bfin_rets_rtx);
|
rtx insn = emit_insn (pat);
|
rtx insn = emit_insn (pat);
|
RTX_FRAME_RELATED_P (insn) = 1;
|
RTX_FRAME_RELATED_P (insn) = 1;
|
}
|
}
|
if (must_save_fp_p ())
|
if (must_save_fp_p ())
|
{
|
{
|
rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
|
rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
|
gen_rtx_PRE_DEC (Pmode, spreg)),
|
gen_rtx_PRE_DEC (Pmode, spreg)),
|
gen_rtx_REG (Pmode, REG_FP));
|
gen_rtx_REG (Pmode, REG_FP));
|
rtx insn = emit_insn (pat);
|
rtx insn = emit_insn (pat);
|
RTX_FRAME_RELATED_P (insn) = 1;
|
RTX_FRAME_RELATED_P (insn) = 1;
|
}
|
}
|
add_to_sp (spreg, -frame_size, 1);
|
add_to_sp (spreg, -frame_size, 1);
|
}
|
}
|
}
|
}
|
|
|
/* Like do_link, but used for epilogues to deallocate the stack frame. */
|
/* Like do_link, but used for epilogues to deallocate the stack frame. */
|
|
|
static void
|
static void
|
do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all)
|
do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all)
|
{
|
{
|
frame_size += arg_area_size ();
|
frame_size += arg_area_size ();
|
|
|
if (all || stack_frame_needed_p ())
|
if (all || stack_frame_needed_p ())
|
emit_insn (gen_unlink ());
|
emit_insn (gen_unlink ());
|
else
|
else
|
{
|
{
|
rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));
|
rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));
|
|
|
add_to_sp (spreg, frame_size, 0);
|
add_to_sp (spreg, frame_size, 0);
|
if (must_save_fp_p ())
|
if (must_save_fp_p ())
|
{
|
{
|
rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
|
rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
|
emit_move_insn (fpreg, postinc);
|
emit_move_insn (fpreg, postinc);
|
emit_insn (gen_rtx_USE (VOIDmode, fpreg));
|
emit_insn (gen_rtx_USE (VOIDmode, fpreg));
|
}
|
}
|
if (! current_function_is_leaf)
|
if (! current_function_is_leaf)
|
{
|
{
|
emit_move_insn (bfin_rets_rtx, postinc);
|
emit_move_insn (bfin_rets_rtx, postinc);
|
emit_insn (gen_rtx_USE (VOIDmode, bfin_rets_rtx));
|
emit_insn (gen_rtx_USE (VOIDmode, bfin_rets_rtx));
|
}
|
}
|
}
|
}
|
}
|
}
|
|
|
/* Generate a prologue suitable for a function of kind FKIND. This is
|
/* Generate a prologue suitable for a function of kind FKIND. This is
|
called for interrupt and exception handler prologues.
|
called for interrupt and exception handler prologues.
|
SPREG contains (reg:SI REG_SP). */
|
SPREG contains (reg:SI REG_SP). */
|
|
|
static void
|
static void
|
expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind)
|
expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind)
|
{
|
{
|
int i;
|
int i;
|
HOST_WIDE_INT frame_size = get_frame_size ();
|
HOST_WIDE_INT frame_size = get_frame_size ();
|
rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
|
rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
|
rtx predec = gen_rtx_MEM (SImode, predec1);
|
rtx predec = gen_rtx_MEM (SImode, predec1);
|
rtx insn;
|
rtx insn;
|
tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
|
tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
|
bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
|
bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
|
tree kspisusp = lookup_attribute ("kspisusp", attrs);
|
tree kspisusp = lookup_attribute ("kspisusp", attrs);
|
|
|
if (kspisusp)
|
if (kspisusp)
|
{
|
{
|
insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
|
insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
|
RTX_FRAME_RELATED_P (insn) = 1;
|
RTX_FRAME_RELATED_P (insn) = 1;
|
}
|
}
|
|
|
/* We need space on the stack in case we need to save the argument
|
/* We need space on the stack in case we need to save the argument
|
registers. */
|
registers. */
|
if (fkind == EXCPT_HANDLER)
|
if (fkind == EXCPT_HANDLER)
|
{
|
{
|
insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
|
insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
|
RTX_FRAME_RELATED_P (insn) = 1;
|
RTX_FRAME_RELATED_P (insn) = 1;
|
}
|
}
|
|
|
insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
|
insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
|
RTX_FRAME_RELATED_P (insn) = 1;
|
RTX_FRAME_RELATED_P (insn) = 1;
|
|
|
/* If we're calling other functions, they won't save their call-clobbered
|
/* If we're calling other functions, they won't save their call-clobbered
|
registers, so we must save everything here. */
|
registers, so we must save everything here. */
|
if (!current_function_is_leaf)
|
if (!current_function_is_leaf)
|
all = true;
|
all = true;
|
expand_prologue_reg_save (spreg, all, true);
|
expand_prologue_reg_save (spreg, all, true);
|
|
|
for (i = REG_P7 + 1; i < REG_CC; i++)
|
for (i = REG_P7 + 1; i < REG_CC; i++)
|
if (all
|
if (all
|
|| regs_ever_live[i]
|
|| regs_ever_live[i]
|
|| (!leaf_function_p () && call_used_regs[i]))
|
|| (!leaf_function_p () && call_used_regs[i]))
|
{
|
{
|
if (i == REG_A0 || i == REG_A1)
|
if (i == REG_A0 || i == REG_A1)
|
insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
|
insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
|
gen_rtx_REG (PDImode, i));
|
gen_rtx_REG (PDImode, i));
|
else
|
else
|
insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
|
insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
|
RTX_FRAME_RELATED_P (insn) = 1;
|
RTX_FRAME_RELATED_P (insn) = 1;
|
}
|
}
|
|
|
if (lookup_attribute ("nesting", attrs))
|
if (lookup_attribute ("nesting", attrs))
|
{
|
{
|
rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
|
rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
|
: fkind == NMI_HANDLER ? REG_RETN
|
: fkind == NMI_HANDLER ? REG_RETN
|
: REG_RETI));
|
: REG_RETI));
|
insn = emit_move_insn (predec, srcreg);
|
insn = emit_move_insn (predec, srcreg);
|
RTX_FRAME_RELATED_P (insn) = 1;
|
RTX_FRAME_RELATED_P (insn) = 1;
|
}
|
}
|
|
|
do_link (spreg, frame_size, all);
|
do_link (spreg, frame_size, all);
|
|
|
if (fkind == EXCPT_HANDLER)
|
if (fkind == EXCPT_HANDLER)
|
{
|
{
|
rtx r0reg = gen_rtx_REG (SImode, REG_R0);
|
rtx r0reg = gen_rtx_REG (SImode, REG_R0);
|
rtx r1reg = gen_rtx_REG (SImode, REG_R1);
|
rtx r1reg = gen_rtx_REG (SImode, REG_R1);
|
rtx r2reg = gen_rtx_REG (SImode, REG_R2);
|
rtx r2reg = gen_rtx_REG (SImode, REG_R2);
|
rtx insn;
|
rtx insn;
|
|
|
insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
|
insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
|
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
|
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
|
NULL_RTX);
|
NULL_RTX);
|
insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
|
insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
|
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
|
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
|
NULL_RTX);
|
NULL_RTX);
|
insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
|
insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
|
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
|
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
|
NULL_RTX);
|
NULL_RTX);
|
insn = emit_move_insn (r1reg, spreg);
|
insn = emit_move_insn (r1reg, spreg);
|
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
|
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
|
NULL_RTX);
|
NULL_RTX);
|
insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
|
insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
|
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
|
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
|
NULL_RTX);
|
NULL_RTX);
|
insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
|
insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
|
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
|
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
|
NULL_RTX);
|
NULL_RTX);
|
}
|
}
|
}
|
}
|
|
|
/* Generate an epilogue suitable for a function of kind FKIND. This is
|
/* Generate an epilogue suitable for a function of kind FKIND. This is
|
called for interrupt and exception handler epilogues.
|
called for interrupt and exception handler epilogues.
|
SPREG contains (reg:SI REG_SP). */
|
SPREG contains (reg:SI REG_SP). */
|
|
|
static void
|
static void
|
expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind)
|
expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind)
|
{
|
{
|
int i;
|
int i;
|
rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
|
rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
|
rtx postinc = gen_rtx_MEM (SImode, postinc1);
|
rtx postinc = gen_rtx_MEM (SImode, postinc1);
|
tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
|
tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
|
bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
|
bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
|
|
|
/* A slightly crude technique to stop flow from trying to delete "dead"
|
/* A slightly crude technique to stop flow from trying to delete "dead"
|
insns. */
|
insns. */
|
MEM_VOLATILE_P (postinc) = 1;
|
MEM_VOLATILE_P (postinc) = 1;
|
|
|
do_unlink (spreg, get_frame_size (), all);
|
do_unlink (spreg, get_frame_size (), all);
|
|
|
if (lookup_attribute ("nesting", attrs))
|
if (lookup_attribute ("nesting", attrs))
|
{
|
{
|
rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
|
rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
|
: fkind == NMI_HANDLER ? REG_RETN
|
: fkind == NMI_HANDLER ? REG_RETN
|
: REG_RETI));
|
: REG_RETI));
|
emit_move_insn (srcreg, postinc);
|
emit_move_insn (srcreg, postinc);
|
}
|
}
|
|
|
/* If we're calling other functions, they won't save their call-clobbered
|
/* If we're calling other functions, they won't save their call-clobbered
|
registers, so we must save (and restore) everything here. */
|
registers, so we must save (and restore) everything here. */
|
if (!current_function_is_leaf)
|
if (!current_function_is_leaf)
|
all = true;
|
all = true;
|
|
|
for (i = REG_CC - 1; i > REG_P7; i--)
|
for (i = REG_CC - 1; i > REG_P7; i--)
|
if (all
|
if (all
|
|| regs_ever_live[i]
|
|| regs_ever_live[i]
|
|| (!leaf_function_p () && call_used_regs[i]))
|
|| (!leaf_function_p () && call_used_regs[i]))
|
{
|
{
|
if (i == REG_A0 || i == REG_A1)
|
if (i == REG_A0 || i == REG_A1)
|
{
|
{
|
rtx mem = gen_rtx_MEM (PDImode, postinc1);
|
rtx mem = gen_rtx_MEM (PDImode, postinc1);
|
MEM_VOLATILE_P (mem) = 1;
|
MEM_VOLATILE_P (mem) = 1;
|
emit_move_insn (gen_rtx_REG (PDImode, i), mem);
|
emit_move_insn (gen_rtx_REG (PDImode, i), mem);
|
}
|
}
|
else
|
else
|
emit_move_insn (gen_rtx_REG (SImode, i), postinc);
|
emit_move_insn (gen_rtx_REG (SImode, i), postinc);
|
}
|
}
|
|
|
expand_epilogue_reg_restore (spreg, all, true);
|
expand_epilogue_reg_restore (spreg, all, true);
|
|
|
emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
|
emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);
|
|
|
/* Deallocate any space we left on the stack in case we needed to save the
|
/* Deallocate any space we left on the stack in case we needed to save the
|
argument registers. */
|
argument registers. */
|
if (fkind == EXCPT_HANDLER)
|
if (fkind == EXCPT_HANDLER)
|
emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));
|
emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));
|
|
|
emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
|
emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
|
}
|
}
|
|
|
/* Used while emitting the prologue to generate code to load the correct value
|
/* Used while emitting the prologue to generate code to load the correct value
|
into the PIC register, which is passed in DEST. */
|
into the PIC register, which is passed in DEST. */
|
|
|
static rtx
|
static rtx
|
bfin_load_pic_reg (rtx dest)
|
bfin_load_pic_reg (rtx dest)
|
{
|
{
|
struct cgraph_local_info *i = NULL;
|
struct cgraph_local_info *i = NULL;
|
rtx addr, insn;
|
rtx addr, insn;
|
|
|
if (flag_unit_at_a_time)
|
if (flag_unit_at_a_time)
|
i = cgraph_local_info (current_function_decl);
|
i = cgraph_local_info (current_function_decl);
|
|
|
/* Functions local to the translation unit don't need to reload the
|
/* Functions local to the translation unit don't need to reload the
|
pic reg, since the caller always passes a usable one. */
|
pic reg, since the caller always passes a usable one. */
|
if (i && i->local)
|
if (i && i->local)
|
return pic_offset_table_rtx;
|
return pic_offset_table_rtx;
|
|
|
if (bfin_lib_id_given)
|
if (bfin_lib_id_given)
|
addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
|
addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
|
else
|
else
|
addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
|
addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
|
gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
|
gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
|
UNSPEC_LIBRARY_OFFSET));
|
UNSPEC_LIBRARY_OFFSET));
|
insn = emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
|
insn = emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
|
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
|
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
|
return dest;
|
return dest;
|
}
|
}
|
|
|
/* Generate RTL for the prologue of the current function. */
|
/* Generate RTL for the prologue of the current function. */
|
|
|
void
|
void
|
bfin_expand_prologue (void)
|
bfin_expand_prologue (void)
|
{
|
{
|
rtx insn;
|
rtx insn;
|
HOST_WIDE_INT frame_size = get_frame_size ();
|
HOST_WIDE_INT frame_size = get_frame_size ();
|
rtx spreg = gen_rtx_REG (Pmode, REG_SP);
|
rtx spreg = gen_rtx_REG (Pmode, REG_SP);
|
e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
|
e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
|
rtx pic_reg_loaded = NULL_RTX;
|
rtx pic_reg_loaded = NULL_RTX;
|
|
|
if (fkind != SUBROUTINE)
|
if (fkind != SUBROUTINE)
|
{
|
{
|
expand_interrupt_handler_prologue (spreg, fkind);
|
expand_interrupt_handler_prologue (spreg, fkind);
|
return;
|
return;
|
}
|
}
|
|
|
if (current_function_limit_stack)
|
if (current_function_limit_stack)
|
{
|
{
|
HOST_WIDE_INT offset
|
HOST_WIDE_INT offset
|
= bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
|
= bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
|
STACK_POINTER_REGNUM);
|
STACK_POINTER_REGNUM);
|
rtx lim = stack_limit_rtx;
|
rtx lim = stack_limit_rtx;
|
|
|
if (GET_CODE (lim) == SYMBOL_REF)
|
if (GET_CODE (lim) == SYMBOL_REF)
|
{
|
{
|
rtx p2reg = gen_rtx_REG (Pmode, REG_P2);
|
rtx p2reg = gen_rtx_REG (Pmode, REG_P2);
|
if (TARGET_ID_SHARED_LIBRARY)
|
if (TARGET_ID_SHARED_LIBRARY)
|
{
|
{
|
rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
|
rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
|
rtx val;
|
rtx val;
|
pic_reg_loaded = bfin_load_pic_reg (p2reg);
|
pic_reg_loaded = bfin_load_pic_reg (p2reg);
|
val = legitimize_pic_address (stack_limit_rtx, p1reg,
|
val = legitimize_pic_address (stack_limit_rtx, p1reg,
|
pic_reg_loaded);
|
pic_reg_loaded);
|
emit_move_insn (p1reg, val);
|
emit_move_insn (p1reg, val);
|
frame_related_constant_load (p2reg, offset, FALSE);
|
frame_related_constant_load (p2reg, offset, FALSE);
|
emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
|
emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
|
lim = p2reg;
|
lim = p2reg;
|
}
|
}
|
else
|
else
|
{
|
{
|
rtx limit = plus_constant (stack_limit_rtx, offset);
|
rtx limit = plus_constant (stack_limit_rtx, offset);
|
emit_move_insn (p2reg, limit);
|
emit_move_insn (p2reg, limit);
|
lim = p2reg;
|
lim = p2reg;
|
}
|
}
|
}
|
}
|
emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
|
emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
|
emit_insn (gen_trapifcc ());
|
emit_insn (gen_trapifcc ());
|
}
|
}
|
expand_prologue_reg_save (spreg, 0, false);
|
expand_prologue_reg_save (spreg, 0, false);
|
|
|
do_link (spreg, frame_size, false);
|
do_link (spreg, frame_size, false);
|
|
|
if (TARGET_ID_SHARED_LIBRARY
|
if (TARGET_ID_SHARED_LIBRARY
|
&& (current_function_uses_pic_offset_table
|
&& (current_function_uses_pic_offset_table
|
|| !current_function_is_leaf))
|
|| !current_function_is_leaf))
|
bfin_load_pic_reg (pic_offset_table_rtx);
|
bfin_load_pic_reg (pic_offset_table_rtx);
|
}
|
}
|
|
|
/* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
|
/* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
|
if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
|
if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
|
eh_return pattern. */
|
eh_return pattern. */
|
|
|
void
|
void
|
bfin_expand_epilogue (int need_return, int eh_return)
|
bfin_expand_epilogue (int need_return, int eh_return)
|
{
|
{
|
rtx spreg = gen_rtx_REG (Pmode, REG_SP);
|
rtx spreg = gen_rtx_REG (Pmode, REG_SP);
|
e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
|
e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
|
|
|
if (fkind != SUBROUTINE)
|
if (fkind != SUBROUTINE)
|
{
|
{
|
expand_interrupt_handler_epilogue (spreg, fkind);
|
expand_interrupt_handler_epilogue (spreg, fkind);
|
return;
|
return;
|
}
|
}
|
|
|
do_unlink (spreg, get_frame_size (), false);
|
do_unlink (spreg, get_frame_size (), false);
|
|
|
expand_epilogue_reg_restore (spreg, false, false);
|
expand_epilogue_reg_restore (spreg, false, false);
|
|
|
/* Omit the return insn if this is for a sibcall. */
|
/* Omit the return insn if this is for a sibcall. */
|
if (! need_return)
|
if (! need_return)
|
return;
|
return;
|
|
|
if (eh_return)
|
if (eh_return)
|
emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));
|
emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));
|
|
|
emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
|
emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
|
}
|
}
|
|
|
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
|
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
|
|
|
int
|
int
|
bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
|
bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
|
unsigned int new_reg)
|
unsigned int new_reg)
|
{
|
{
|
/* Interrupt functions can only use registers that have already been
|
/* Interrupt functions can only use registers that have already been
|
saved by the prologue, even if they would normally be
|
saved by the prologue, even if they would normally be
|
call-clobbered. */
|
call-clobbered. */
|
|
|
if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
|
if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
|
&& !regs_ever_live[new_reg])
|
&& !regs_ever_live[new_reg])
|
return 0;
|
return 0;
|
|
|
return 1;
|
return 1;
|
}
|
}
|
|
|
/* Return the value of the return address for the frame COUNT steps up
|
/* Return the value of the return address for the frame COUNT steps up
|
from the current frame, after the prologue.
|
from the current frame, after the prologue.
|
We punt for everything but the current frame by returning const0_rtx. */
|
We punt for everything but the current frame by returning const0_rtx. */
|
|
|
rtx
|
rtx
|
bfin_return_addr_rtx (int count)
|
bfin_return_addr_rtx (int count)
|
{
|
{
|
if (count != 0)
|
if (count != 0)
|
return const0_rtx;
|
return const0_rtx;
|
|
|
return get_hard_reg_initial_val (Pmode, REG_RETS);
|
return get_hard_reg_initial_val (Pmode, REG_RETS);
|
}
|
}
|
|
|
/* Try machine-dependent ways of modifying an illegitimate address X
|
/* Try machine-dependent ways of modifying an illegitimate address X
|
to be legitimate. If we find one, return the new, valid address,
|
to be legitimate. If we find one, return the new, valid address,
|
otherwise return NULL_RTX.
|
otherwise return NULL_RTX.
|
|
|
OLDX is the address as it was before break_out_memory_refs was called.
|
OLDX is the address as it was before break_out_memory_refs was called.
|
In some cases it is useful to look at this to decide what needs to be done.
|
In some cases it is useful to look at this to decide what needs to be done.
|
|
|
MODE is the mode of the memory reference. */
|
MODE is the mode of the memory reference. */
|
|
|
rtx
|
rtx
|
legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
|
legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
|
enum machine_mode mode ATTRIBUTE_UNUSED)
|
enum machine_mode mode ATTRIBUTE_UNUSED)
|
{
|
{
|
return NULL_RTX;
|
return NULL_RTX;
|
}
|
}
|
|
|
static rtx
|
static rtx
|
bfin_delegitimize_address (rtx orig_x)
|
bfin_delegitimize_address (rtx orig_x)
|
{
|
{
|
rtx x = orig_x, y;
|
rtx x = orig_x, y;
|
|
|
if (GET_CODE (x) != MEM)
|
if (GET_CODE (x) != MEM)
|
return orig_x;
|
return orig_x;
|
|
|
x = XEXP (x, 0);
|
x = XEXP (x, 0);
|
if (GET_CODE (x) == PLUS
|
if (GET_CODE (x) == PLUS
|
&& GET_CODE (XEXP (x, 1)) == UNSPEC
|
&& GET_CODE (XEXP (x, 1)) == UNSPEC
|
&& XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
|
&& XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
|
&& GET_CODE (XEXP (x, 0)) == REG
|
&& GET_CODE (XEXP (x, 0)) == REG
|
&& REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
|
&& REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
|
return XVECEXP (XEXP (x, 1), 0, 0);
|
return XVECEXP (XEXP (x, 1), 0, 0);
|
|
|
return orig_x;
|
return orig_x;
|
}
|
}
|
|
|
/* This predicate is used to compute the length of a load/store insn.
|
/* This predicate is used to compute the length of a load/store insn.
|
OP is a MEM rtx, we return nonzero if its addressing mode requires a
|
OP is a MEM rtx, we return nonzero if its addressing mode requires a
|
32 bit instruction. */
|
32 bit instruction. */
|
|
|
int
|
int
|
effective_address_32bit_p (rtx op, enum machine_mode mode)
|
effective_address_32bit_p (rtx op, enum machine_mode mode)
|
{
|
{
|
HOST_WIDE_INT offset;
|
HOST_WIDE_INT offset;
|
|
|
mode = GET_MODE (op);
|
mode = GET_MODE (op);
|
op = XEXP (op, 0);
|
op = XEXP (op, 0);
|
|
|
if (GET_CODE (op) != PLUS)
|
if (GET_CODE (op) != PLUS)
|
{
|
{
|
gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
|
gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
|
|| GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
|
|| GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
|
return 0;
|
return 0;
|
}
|
}
|
|
|
offset = INTVAL (XEXP (op, 1));
|
offset = INTVAL (XEXP (op, 1));
|
|
|
/* All byte loads use a 16 bit offset. */
|
/* All byte loads use a 16 bit offset. */
|
if (GET_MODE_SIZE (mode) == 1)
|
if (GET_MODE_SIZE (mode) == 1)
|
return 1;
|
return 1;
|
|
|
if (GET_MODE_SIZE (mode) == 4)
|
if (GET_MODE_SIZE (mode) == 4)
|
{
|
{
|
/* Frame pointer relative loads can use a negative offset, all others
|
/* Frame pointer relative loads can use a negative offset, all others
|
are restricted to a small positive one. */
|
are restricted to a small positive one. */
|
if (XEXP (op, 0) == frame_pointer_rtx)
|
if (XEXP (op, 0) == frame_pointer_rtx)
|
return offset < -128 || offset > 60;
|
return offset < -128 || offset > 60;
|
return offset < 0 || offset > 60;
|
return offset < 0 || offset > 60;
|
}
|
}
|
|
|
/* Must be HImode now. */
|
/* Must be HImode now. */
|
return offset < 0 || offset > 30;
|
return offset < 0 || offset > 30;
|
}
|
}
|
|
|
/* Returns true if X is a memory reference using an I register. */
|
/* Returns true if X is a memory reference using an I register. */
|
bool
|
bool
|
bfin_dsp_memref_p (rtx x)
|
bfin_dsp_memref_p (rtx x)
|
{
|
{
|
if (! MEM_P (x))
|
if (! MEM_P (x))
|
return false;
|
return false;
|
x = XEXP (x, 0);
|
x = XEXP (x, 0);
|
if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
|
if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
|
|| GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
|
|| GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
|
x = XEXP (x, 0);
|
x = XEXP (x, 0);
|
return IREG_P (x);
|
return IREG_P (x);
|
}
|
}
|
|
|
/* Return cost of the memory address ADDR.
|
/* Return cost of the memory address ADDR.
|
All addressing modes are equally cheap on the Blackfin. */
|
All addressing modes are equally cheap on the Blackfin. */
|
|
|
static int
|
static int
|
bfin_address_cost (rtx addr ATTRIBUTE_UNUSED)
|
bfin_address_cost (rtx addr ATTRIBUTE_UNUSED)
|
{
|
{
|
return 1;
|
return 1;
|
}
|
}
|
|
|
/* Subroutine of print_operand; used to print a memory reference X to FILE. */
|
/* Subroutine of print_operand; used to print a memory reference X to FILE. */
|
|
|
void
|
void
|
print_address_operand (FILE *file, rtx x)
|
print_address_operand (FILE *file, rtx x)
|
{
|
{
|
switch (GET_CODE (x))
|
switch (GET_CODE (x))
|
{
|
{
|
case PLUS:
|
case PLUS:
|
output_address (XEXP (x, 0));
|
output_address (XEXP (x, 0));
|
fprintf (file, "+");
|
fprintf (file, "+");
|
output_address (XEXP (x, 1));
|
output_address (XEXP (x, 1));
|
break;
|
break;
|
|
|
case PRE_DEC:
|
case PRE_DEC:
|
fprintf (file, "--");
|
fprintf (file, "--");
|
output_address (XEXP (x, 0));
|
output_address (XEXP (x, 0));
|
break;
|
break;
|
case POST_INC:
|
case POST_INC:
|
output_address (XEXP (x, 0));
|
output_address (XEXP (x, 0));
|
fprintf (file, "++");
|
fprintf (file, "++");
|
break;
|
break;
|
case POST_DEC:
|
case POST_DEC:
|
output_address (XEXP (x, 0));
|
output_address (XEXP (x, 0));
|
fprintf (file, "--");
|
fprintf (file, "--");
|
break;
|
break;
|
|
|
default:
|
default:
|
gcc_assert (GET_CODE (x) != MEM);
|
gcc_assert (GET_CODE (x) != MEM);
|
print_operand (file, x, 0);
|
print_operand (file, x, 0);
|
break;
|
break;
|
}
|
}
|
}
|
}
|
|
|
/* Adding intp DImode support by Tony
|
/* Adding intp DImode support by Tony
|
* -- Q: (low word)
|
* -- Q: (low word)
|
* -- R: (high word)
|
* -- R: (high word)
|
*/
|
*/
|
|
|
void
|
void
|
print_operand (FILE *file, rtx x, char code)
|
print_operand (FILE *file, rtx x, char code)
|
{
|
{
|
enum machine_mode mode = GET_MODE (x);
|
enum machine_mode mode = GET_MODE (x);
|
|
|
switch (code)
|
switch (code)
|
{
|
{
|
case 'j':
|
case 'j':
|
switch (GET_CODE (x))
|
switch (GET_CODE (x))
|
{
|
{
|
case EQ:
|
case EQ:
|
fprintf (file, "e");
|
fprintf (file, "e");
|
break;
|
break;
|
case NE:
|
case NE:
|
fprintf (file, "ne");
|
fprintf (file, "ne");
|
break;
|
break;
|
case GT:
|
case GT:
|
fprintf (file, "g");
|
fprintf (file, "g");
|
break;
|
break;
|
case LT:
|
case LT:
|
fprintf (file, "l");
|
fprintf (file, "l");
|
break;
|
break;
|
case GE:
|
case GE:
|
fprintf (file, "ge");
|
fprintf (file, "ge");
|
break;
|
break;
|
case LE:
|
case LE:
|
fprintf (file, "le");
|
fprintf (file, "le");
|
break;
|
break;
|
case GTU:
|
case GTU:
|
fprintf (file, "g");
|
fprintf (file, "g");
|
break;
|
break;
|
case LTU:
|
case LTU:
|
fprintf (file, "l");
|
fprintf (file, "l");
|
break;
|
break;
|
case GEU:
|
case GEU:
|
fprintf (file, "ge");
|
fprintf (file, "ge");
|
break;
|
break;
|
case LEU:
|
case LEU:
|
fprintf (file, "le");
|
fprintf (file, "le");
|
break;
|
break;
|
default:
|
default:
|
output_operand_lossage ("invalid %%j value");
|
output_operand_lossage ("invalid %%j value");
|
}
|
}
|
break;
|
break;
|
|
|
case 'J': /* reverse logic */
|
case 'J': /* reverse logic */
|
switch (GET_CODE(x))
|
switch (GET_CODE(x))
|
{
|
{
|
case EQ:
|
case EQ:
|
fprintf (file, "ne");
|
fprintf (file, "ne");
|
break;
|
break;
|
case NE:
|
case NE:
|
fprintf (file, "e");
|
fprintf (file, "e");
|
break;
|
break;
|
case GT:
|
case GT:
|
fprintf (file, "le");
|
fprintf (file, "le");
|
break;
|
break;
|
case LT:
|
case LT:
|
fprintf (file, "ge");
|
fprintf (file, "ge");
|
break;
|
break;
|
case GE:
|
case GE:
|
fprintf (file, "l");
|
fprintf (file, "l");
|
break;
|
break;
|
case LE:
|
case LE:
|
fprintf (file, "g");
|
fprintf (file, "g");
|
break;
|
break;
|
case GTU:
|
case GTU:
|
fprintf (file, "le");
|
fprintf (file, "le");
|
break;
|
break;
|
case LTU:
|
case LTU:
|
fprintf (file, "ge");
|
fprintf (file, "ge");
|
break;
|
break;
|
case GEU:
|
case GEU:
|
fprintf (file, "l");
|
fprintf (file, "l");
|
break;
|
break;
|
case LEU:
|
case LEU:
|
fprintf (file, "g");
|
fprintf (file, "g");
|
break;
|
break;
|
default:
|
default:
|
output_operand_lossage ("invalid %%J value");
|
output_operand_lossage ("invalid %%J value");
|
}
|
}
|
break;
|
break;
|
|
|
default:
|
default:
|
switch (GET_CODE (x))
|
switch (GET_CODE (x))
|
{
|
{
|
case REG:
|
case REG:
|
if (code == 'h')
|
if (code == 'h')
|
{
|
{
|
gcc_assert (REGNO (x) < 32);
|
gcc_assert (REGNO (x) < 32);
|
fprintf (file, "%s", short_reg_names[REGNO (x)]);
|
fprintf (file, "%s", short_reg_names[REGNO (x)]);
|
/*fprintf (file, "\n%d\n ", REGNO (x));*/
|
/*fprintf (file, "\n%d\n ", REGNO (x));*/
|
break;
|
break;
|
}
|
}
|
else if (code == 'd')
|
else if (code == 'd')
|
{
|
{
|
gcc_assert (REGNO (x) < 32);
|
gcc_assert (REGNO (x) < 32);
|
fprintf (file, "%s", high_reg_names[REGNO (x)]);
|
fprintf (file, "%s", high_reg_names[REGNO (x)]);
|
break;
|
break;
|
}
|
}
|
else if (code == 'w')
|
else if (code == 'w')
|
{
|
{
|
gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
|
gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
|
fprintf (file, "%s.w", reg_names[REGNO (x)]);
|
fprintf (file, "%s.w", reg_names[REGNO (x)]);
|
}
|
}
|
else if (code == 'x')
|
else if (code == 'x')
|
{
|
{
|
gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
|
gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
|
fprintf (file, "%s.x", reg_names[REGNO (x)]);
|
fprintf (file, "%s.x", reg_names[REGNO (x)]);
|
}
|
}
|
else if (code == 'D')
|
else if (code == 'D')
|
{
|
{
|
fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
|
fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
|
}
|
}
|
else if (code == 'H')
|
else if (code == 'H')
|
{
|
{
|
gcc_assert (mode == DImode || mode == DFmode);
|
gcc_assert (mode == DImode || mode == DFmode);
|
gcc_assert (REG_P (x));
|
gcc_assert (REG_P (x));
|
fprintf (file, "%s", reg_names[REGNO (x) + 1]);
|
fprintf (file, "%s", reg_names[REGNO (x) + 1]);
|
}
|
}
|
else if (code == 'T')
|
else if (code == 'T')
|
{
|
{
|
gcc_assert (D_REGNO_P (REGNO (x)));
|
gcc_assert (D_REGNO_P (REGNO (x)));
|
fprintf (file, "%s", byte_reg_names[REGNO (x)]);
|
fprintf (file, "%s", byte_reg_names[REGNO (x)]);
|
}
|
}
|
else
|
else
|
fprintf (file, "%s", reg_names[REGNO (x)]);
|
fprintf (file, "%s", reg_names[REGNO (x)]);
|
break;
|
break;
|
|
|
case MEM:
|
case MEM:
|
fputc ('[', file);
|
fputc ('[', file);
|
x = XEXP (x,0);
|
x = XEXP (x,0);
|
print_address_operand (file, x);
|
print_address_operand (file, x);
|
fputc (']', file);
|
fputc (']', file);
|
break;
|
break;
|
|
|
case CONST_INT:
|
case CONST_INT:
|
if (code == 'M')
|
if (code == 'M')
|
{
|
{
|
switch (INTVAL (x))
|
switch (INTVAL (x))
|
{
|
{
|
case MACFLAG_NONE:
|
case MACFLAG_NONE:
|
break;
|
break;
|
case MACFLAG_FU:
|
case MACFLAG_FU:
|
fputs ("(FU)", file);
|
fputs ("(FU)", file);
|
break;
|
break;
|
case MACFLAG_T:
|
case MACFLAG_T:
|
fputs ("(T)", file);
|
fputs ("(T)", file);
|
break;
|
break;
|
case MACFLAG_TFU:
|
case MACFLAG_TFU:
|
fputs ("(TFU)", file);
|
fputs ("(TFU)", file);
|
break;
|
break;
|
case MACFLAG_W32:
|
case MACFLAG_W32:
|
fputs ("(W32)", file);
|
fputs ("(W32)", file);
|
break;
|
break;
|
case MACFLAG_IS:
|
case MACFLAG_IS:
|
fputs ("(IS)", file);
|
fputs ("(IS)", file);
|
break;
|
break;
|
case MACFLAG_IU:
|
case MACFLAG_IU:
|
fputs ("(IU)", file);
|
fputs ("(IU)", file);
|
break;
|
break;
|
case MACFLAG_IH:
|
case MACFLAG_IH:
|
fputs ("(IH)", file);
|
fputs ("(IH)", file);
|
break;
|
break;
|
case MACFLAG_M:
|
case MACFLAG_M:
|
fputs ("(M)", file);
|
fputs ("(M)", file);
|
break;
|
break;
|
case MACFLAG_ISS2:
|
case MACFLAG_ISS2:
|
fputs ("(ISS2)", file);
|
fputs ("(ISS2)", file);
|
break;
|
break;
|
case MACFLAG_S2RND:
|
case MACFLAG_S2RND:
|
fputs ("(S2RND)", file);
|
fputs ("(S2RND)", file);
|
break;
|
break;
|
default:
|
default:
|
gcc_unreachable ();
|
gcc_unreachable ();
|
}
|
}
|
break;
|
break;
|
}
|
}
|
else if (code == 'b')
|
else if (code == 'b')
|
{
|
{
|
if (INTVAL (x) == 0)
|
if (INTVAL (x) == 0)
|
fputs ("+=", file);
|
fputs ("+=", file);
|
else if (INTVAL (x) == 1)
|
else if (INTVAL (x) == 1)
|
fputs ("-=", file);
|
fputs ("-=", file);
|
else
|
else
|
gcc_unreachable ();
|
gcc_unreachable ();
|
break;
|
break;
|
}
|
}
|
/* Moves to half registers with d or h modifiers always use unsigned
|
/* Moves to half registers with d or h modifiers always use unsigned
|
constants. */
|
constants. */
|
else if (code == 'd')
|
else if (code == 'd')
|
x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
|
x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
|
else if (code == 'h')
|
else if (code == 'h')
|
x = GEN_INT (INTVAL (x) & 0xffff);
|
x = GEN_INT (INTVAL (x) & 0xffff);
|
else if (code == 'X')
|
else if (code == 'X')
|
x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
|
x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
|
else if (code == 'Y')
|
else if (code == 'Y')
|
x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
|
x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
|
else if (code == 'Z')
|
else if (code == 'Z')
|
/* Used for LINK insns. */
|
/* Used for LINK insns. */
|
x = GEN_INT (-8 - INTVAL (x));
|
x = GEN_INT (-8 - INTVAL (x));
|
|
|
/* fall through */
|
/* fall through */
|
|
|
case SYMBOL_REF:
|
case SYMBOL_REF:
|
output_addr_const (file, x);
|
output_addr_const (file, x);
|
break;
|
break;
|
|
|
case CONST_DOUBLE:
|
case CONST_DOUBLE:
|
output_operand_lossage ("invalid const_double operand");
|
output_operand_lossage ("invalid const_double operand");
|
break;
|
break;
|
|
|
case UNSPEC:
|
case UNSPEC:
|
switch (XINT (x, 1))
|
switch (XINT (x, 1))
|
{
|
{
|
case UNSPEC_MOVE_PIC:
|
case UNSPEC_MOVE_PIC:
|
output_addr_const (file, XVECEXP (x, 0, 0));
|
output_addr_const (file, XVECEXP (x, 0, 0));
|
fprintf (file, "@GOT");
|
fprintf (file, "@GOT");
|
break;
|
break;
|
|
|
case UNSPEC_MOVE_FDPIC:
|
case UNSPEC_MOVE_FDPIC:
|
output_addr_const (file, XVECEXP (x, 0, 0));
|
output_addr_const (file, XVECEXP (x, 0, 0));
|
fprintf (file, "@GOT17M4");
|
fprintf (file, "@GOT17M4");
|
break;
|
break;
|
|
|
case UNSPEC_FUNCDESC_GOT17M4:
|
case UNSPEC_FUNCDESC_GOT17M4:
|
output_addr_const (file, XVECEXP (x, 0, 0));
|
output_addr_const (file, XVECEXP (x, 0, 0));
|
fprintf (file, "@FUNCDESC_GOT17M4");
|
fprintf (file, "@FUNCDESC_GOT17M4");
|
break;
|
break;
|
|
|
case UNSPEC_LIBRARY_OFFSET:
|
case UNSPEC_LIBRARY_OFFSET:
|
fprintf (file, "_current_shared_library_p5_offset_");
|
fprintf (file, "_current_shared_library_p5_offset_");
|
break;
|
break;
|
|
|
default:
|
default:
|
gcc_unreachable ();
|
gcc_unreachable ();
|
}
|
}
|
break;
|
break;
|
|
|
default:
|
default:
|
output_addr_const (file, x);
|
output_addr_const (file, x);
|
}
|
}
|
}
|
}
|
}
|
}
|
|
|
/* Argument support functions. */
|
/* Argument support functions. */
|
|
|
/* Initialize a variable CUM of type CUMULATIVE_ARGS
|
/* Initialize a variable CUM of type CUMULATIVE_ARGS
|
for a call to a function whose data type is FNTYPE.
|
for a call to a function whose data type is FNTYPE.
|
For a library call, FNTYPE is 0.
|
For a library call, FNTYPE is 0.
|
VDSP C Compiler manual, our ABI says that
|
VDSP C Compiler manual, our ABI says that
|
first 3 words of arguments will use R0, R1 and R2.
|
first 3 words of arguments will use R0, R1 and R2.
|
*/
|
*/
|
|
|
void
|
void
|
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
|
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
|
rtx libname ATTRIBUTE_UNUSED)
|
rtx libname ATTRIBUTE_UNUSED)
|
{
|
{
|
static CUMULATIVE_ARGS zero_cum;
|
static CUMULATIVE_ARGS zero_cum;
|
|
|
*cum = zero_cum;
|
*cum = zero_cum;
|
|
|
/* Set up the number of registers to use for passing arguments. */
|
/* Set up the number of registers to use for passing arguments. */
|
|
|
cum->nregs = max_arg_registers;
|
cum->nregs = max_arg_registers;
|
cum->arg_regs = arg_regs;
|
cum->arg_regs = arg_regs;
|
|
|
cum->call_cookie = CALL_NORMAL;
|
cum->call_cookie = CALL_NORMAL;
|
/* Check for a longcall attribute. */
|
/* Check for a longcall attribute. */
|
if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
|
if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
|
cum->call_cookie |= CALL_SHORT;
|
cum->call_cookie |= CALL_SHORT;
|
else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
|
else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
|
cum->call_cookie |= CALL_LONG;
|
cum->call_cookie |= CALL_LONG;
|
|
|
return;
|
return;
|
}
|
}
|
|
|
/* Update the data in CUM to advance over an argument
|
/* Update the data in CUM to advance over an argument
|
of mode MODE and data type TYPE.
|
of mode MODE and data type TYPE.
|
(TYPE is null for libcalls where that information may not be available.) */
|
(TYPE is null for libcalls where that information may not be available.) */
|
|
|
void
|
void
|
function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
|
function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
|
int named ATTRIBUTE_UNUSED)
|
int named ATTRIBUTE_UNUSED)
|
{
|
{
|
int count, bytes, words;
|
int count, bytes, words;
|
|
|
bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
|
bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
|
words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
|
words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
|
|
|
cum->words += words;
|
cum->words += words;
|
cum->nregs -= words;
|
cum->nregs -= words;
|
|
|
if (cum->nregs <= 0)
|
if (cum->nregs <= 0)
|
{
|
{
|
cum->nregs = 0;
|
cum->nregs = 0;
|
cum->arg_regs = NULL;
|
cum->arg_regs = NULL;
|
}
|
}
|
else
|
else
|
{
|
{
|
for (count = 1; count <= words; count++)
|
for (count = 1; count <= words; count++)
|
cum->arg_regs++;
|
cum->arg_regs++;
|
}
|
}
|
|
|
return;
|
return;
|
}
|
}
|
|
|
/* Define where to put the arguments to a function.
|
/* Define where to put the arguments to a function.
|
Value is zero to push the argument on the stack,
|
Value is zero to push the argument on the stack,
|
or a hard register in which to store the argument.
|
or a hard register in which to store the argument.
|
|
|
MODE is the argument's machine mode.
|
MODE is the argument's machine mode.
|
TYPE is the data type of the argument (as a tree).
|
TYPE is the data type of the argument (as a tree).
|
This is null for libcalls where that information may
|
This is null for libcalls where that information may
|
not be available.
|
not be available.
|
CUM is a variable of type CUMULATIVE_ARGS which gives info about
|
CUM is a variable of type CUMULATIVE_ARGS which gives info about
|
the preceding args and about the function being called.
|
the preceding args and about the function being called.
|
NAMED is nonzero if this argument is a named parameter
|
NAMED is nonzero if this argument is a named parameter
|
(otherwise it is an extra parameter matching an ellipsis). */
|
(otherwise it is an extra parameter matching an ellipsis). */
|
|
|
struct rtx_def *
|
struct rtx_def *
|
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
|
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
|
int named ATTRIBUTE_UNUSED)
|
int named ATTRIBUTE_UNUSED)
|
{
|
{
|
int bytes
|
int bytes
|
= (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
|
= (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
|
|
|
if (mode == VOIDmode)
|
if (mode == VOIDmode)
|
/* Compute operand 2 of the call insn. */
|
/* Compute operand 2 of the call insn. */
|
return GEN_INT (cum->call_cookie);
|
return GEN_INT (cum->call_cookie);
|
|
|
if (bytes == -1)
|
if (bytes == -1)
|
return NULL_RTX;
|
return NULL_RTX;
|
|
|
if (cum->nregs)
|
if (cum->nregs)
|
return gen_rtx_REG (mode, *(cum->arg_regs));
|
return gen_rtx_REG (mode, *(cum->arg_regs));
|
|
|
return NULL_RTX;
|
return NULL_RTX;
|
}
|
}
|
|
|
/* For an arg passed partly in registers and partly in memory,
|
/* For an arg passed partly in registers and partly in memory,
|
this is the number of bytes passed in registers.
|
this is the number of bytes passed in registers.
|
For args passed entirely in registers or entirely in memory, zero.
|
For args passed entirely in registers or entirely in memory, zero.
|
|
|
Refer VDSP C Compiler manual, our ABI.
|
Refer VDSP C Compiler manual, our ABI.
|
First 3 words are in registers. So, if a an argument is larger
|
First 3 words are in registers. So, if a an argument is larger
|
than the registers available, it will span the register and
|
than the registers available, it will span the register and
|
stack. */
|
stack. */
|
|
|
static int
|
static int
|
bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
|
bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
|
tree type ATTRIBUTE_UNUSED,
|
tree type ATTRIBUTE_UNUSED,
|
bool named ATTRIBUTE_UNUSED)
|
bool named ATTRIBUTE_UNUSED)
|
{
|
{
|
int bytes
|
int bytes
|
= (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
|
= (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
|
int bytes_left = cum->nregs * UNITS_PER_WORD;
|
int bytes_left = cum->nregs * UNITS_PER_WORD;
|
|
|
if (bytes == -1)
|
if (bytes == -1)
|
return 0;
|
return 0;
|
|
|
if (bytes_left == 0)
|
if (bytes_left == 0)
|
return 0;
|
return 0;
|
if (bytes > bytes_left)
|
if (bytes > bytes_left)
|
return bytes_left;
|
return bytes_left;
|
return 0;
|
return 0;
|
}
|
}
|
|
|
/* Variable sized types are passed by reference. */
|
/* Variable sized types are passed by reference. */
|
|
|
static bool
|
static bool
|
bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
|
bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
|
enum machine_mode mode ATTRIBUTE_UNUSED,
|
enum machine_mode mode ATTRIBUTE_UNUSED,
|
tree type, bool named ATTRIBUTE_UNUSED)
|
tree type, bool named ATTRIBUTE_UNUSED)
|
{
|
{
|
return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
|
return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
|
}
|
}
|
|
|
/* Decide whether a type should be returned in memory (true)
|
/* Decide whether a type should be returned in memory (true)
|
or in a register (false). This is called by the macro
|
or in a register (false). This is called by the macro
|
RETURN_IN_MEMORY. */
|
RETURN_IN_MEMORY. */
|
|
|
int
|
int
|
bfin_return_in_memory (tree type)
|
bfin_return_in_memory (tree type)
|
{
|
{
|
int size = int_size_in_bytes (type);
|
int size = int_size_in_bytes (type);
|
return size > 2 * UNITS_PER_WORD || size == -1;
|
return size > 2 * UNITS_PER_WORD || size == -1;
|
}
|
}
|
|
|
/* Register in which address to store a structure value
|
/* Register in which address to store a structure value
|
is passed to a function. */
|
is passed to a function. */
|
static rtx
|
static rtx
|
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
|
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
|
int incoming ATTRIBUTE_UNUSED)
|
int incoming ATTRIBUTE_UNUSED)
|
{
|
{
|
return gen_rtx_REG (Pmode, REG_P0);
|
return gen_rtx_REG (Pmode, REG_P0);
|
}
|
}
|
|
|
/* Return true when register may be used to pass function parameters. */
|
/* Return true when register may be used to pass function parameters. */
|
|
|
bool
|
bool
|
function_arg_regno_p (int n)
|
function_arg_regno_p (int n)
|
{
|
{
|
int i;
|
int i;
|
for (i = 0; arg_regs[i] != -1; i++)
|
for (i = 0; arg_regs[i] != -1; i++)
|
if (n == arg_regs[i])
|
if (n == arg_regs[i])
|
return true;
|
return true;
|
return false;
|
return false;
|
}
|
}
|
|
|
/* Returns 1 if OP contains a symbol reference */
|
/* Returns 1 if OP contains a symbol reference */
|
|
|
int
|
int
|
symbolic_reference_mentioned_p (rtx op)
|
symbolic_reference_mentioned_p (rtx op)
|
{
|
{
|
register const char *fmt;
|
register const char *fmt;
|
register int i;
|
register int i;
|
|
|
if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
|
if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
|
return 1;
|
return 1;
|
|
|
fmt = GET_RTX_FORMAT (GET_CODE (op));
|
fmt = GET_RTX_FORMAT (GET_CODE (op));
|
for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
|
for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
|
{
|
{
|
if (fmt[i] == 'E')
|
if (fmt[i] == 'E')
|
{
|
{
|
register int j;
|
register int j;
|
|
|
for (j = XVECLEN (op, i) - 1; j >= 0; j--)
|
for (j = XVECLEN (op, i) - 1; j >= 0; j--)
|
if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
|
if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
|
return 1;
|
return 1;
|
}
|
}
|
|
|
else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
|
else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
|
return 1;
|
return 1;
|
}
|
}
|
|
|
return 0;
|
return 0;
|
}
|
}
|
|
|
/* Decide whether we can make a sibling call to a function. DECL is the
|
/* Decide whether we can make a sibling call to a function. DECL is the
|
declaration of the function being targeted by the call and EXP is the
|
declaration of the function being targeted by the call and EXP is the
|
CALL_EXPR representing the call. */
|
CALL_EXPR representing the call. */
|
|
|
static bool
|
static bool
|
bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
|
bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
|
tree exp ATTRIBUTE_UNUSED)
|
tree exp ATTRIBUTE_UNUSED)
|
{
|
{
|
e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
|
e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
|
return fkind == SUBROUTINE;
|
return fkind == SUBROUTINE;
|
}
|
}
|
|
|
/* Emit RTL insns to initialize the variable parts of a trampoline at
|
/* Emit RTL insns to initialize the variable parts of a trampoline at
|
TRAMP. FNADDR is an RTX for the address of the function's pure
|
TRAMP. FNADDR is an RTX for the address of the function's pure
|
code. CXT is an RTX for the static chain value for the function. */
|
code. CXT is an RTX for the static chain value for the function. */
|
|
|
void
|
void
|
initialize_trampoline (tramp, fnaddr, cxt)
|
initialize_trampoline (tramp, fnaddr, cxt)
|
rtx tramp, fnaddr, cxt;
|
rtx tramp, fnaddr, cxt;
|
{
|
{
|
rtx t1 = copy_to_reg (fnaddr);
|
rtx t1 = copy_to_reg (fnaddr);
|
rtx t2 = copy_to_reg (cxt);
|
rtx t2 = copy_to_reg (cxt);
|
rtx addr;
|
rtx addr;
|
int i = 0;
|
int i = 0;
|
|
|
if (TARGET_FDPIC)
|
if (TARGET_FDPIC)
|
{
|
{
|
rtx a = memory_address (Pmode, plus_constant (tramp, 8));
|
rtx a = memory_address (Pmode, plus_constant (tramp, 8));
|
addr = memory_address (Pmode, tramp);
|
addr = memory_address (Pmode, tramp);
|
emit_move_insn (gen_rtx_MEM (SImode, addr), a);
|
emit_move_insn (gen_rtx_MEM (SImode, addr), a);
|
i = 8;
|
i = 8;
|
}
|
}
|
|
|
addr = memory_address (Pmode, plus_constant (tramp, i + 2));
|
addr = memory_address (Pmode, plus_constant (tramp, i + 2));
|
emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
|
emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
|
emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
|
emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
|
addr = memory_address (Pmode, plus_constant (tramp, i + 6));
|
addr = memory_address (Pmode, plus_constant (tramp, i + 6));
|
emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
|
emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
|
|
|
addr = memory_address (Pmode, plus_constant (tramp, i + 10));
|
addr = memory_address (Pmode, plus_constant (tramp, i + 10));
|
emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
|
emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
|
emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
|
emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
|
addr = memory_address (Pmode, plus_constant (tramp, i + 14));
|
addr = memory_address (Pmode, plus_constant (tramp, i + 14));
|
emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
|
emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
|
}
|
}
|
|
|
/* Emit insns to move operands[1] into operands[0]. */
|
/* Emit insns to move operands[1] into operands[0]. */
|
|
|
void
|
void
|
emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
|
emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
|
{
|
{
|
rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
|
rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
|
|
|
gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
|
gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
|
if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
|
if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
|
operands[1] = force_reg (SImode, operands[1]);
|
operands[1] = force_reg (SImode, operands[1]);
|
else
|
else
|
operands[1] = legitimize_pic_address (operands[1], temp,
|
operands[1] = legitimize_pic_address (operands[1], temp,
|
TARGET_FDPIC ? OUR_FDPIC_REG
|
TARGET_FDPIC ? OUR_FDPIC_REG
|
: pic_offset_table_rtx);
|
: pic_offset_table_rtx);
|
}
|
}
|
|
|
/* Expand a move operation in mode MODE. The operands are in OPERANDS. */
|
/* Expand a move operation in mode MODE. The operands are in OPERANDS. */
|
|
|
void
|
void
|
expand_move (rtx *operands, enum machine_mode mode)
|
expand_move (rtx *operands, enum machine_mode mode)
|
{
|
{
|
rtx op = operands[1];
|
rtx op = operands[1];
|
if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
|
if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
|
&& SYMBOLIC_CONST (op))
|
&& SYMBOLIC_CONST (op))
|
emit_pic_move (operands, mode);
|
emit_pic_move (operands, mode);
|
/* Don't generate memory->memory or constant->memory moves, go through a
|
/* Don't generate memory->memory or constant->memory moves, go through a
|
register */
|
register */
|
else if ((reload_in_progress | reload_completed) == 0
|
else if ((reload_in_progress | reload_completed) == 0
|
&& GET_CODE (operands[0]) == MEM
|
&& GET_CODE (operands[0]) == MEM
|
&& GET_CODE (operands[1]) != REG)
|
&& GET_CODE (operands[1]) != REG)
|
operands[1] = force_reg (mode, operands[1]);
|
operands[1] = force_reg (mode, operands[1]);
|
}
|
}
|
|
|
/* Split one or more DImode RTL references into pairs of SImode
|
/* Split one or more DImode RTL references into pairs of SImode
|
references. The RTL can be REG, offsettable MEM, integer constant, or
|
references. The RTL can be REG, offsettable MEM, integer constant, or
|
CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
|
CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
|
split and "num" is its length. lo_half and hi_half are output arrays
|
split and "num" is its length. lo_half and hi_half are output arrays
|
that parallel "operands". */
|
that parallel "operands". */
|
|
|
void
|
void
|
split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
|
split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
|
{
|
{
|
while (num--)
|
while (num--)
|
{
|
{
|
rtx op = operands[num];
|
rtx op = operands[num];
|
|
|
/* simplify_subreg refuse to split volatile memory addresses,
|
/* simplify_subreg refuse to split volatile memory addresses,
|
but we still have to handle it. */
|
but we still have to handle it. */
|
if (GET_CODE (op) == MEM)
|
if (GET_CODE (op) == MEM)
|
{
|
{
|
lo_half[num] = adjust_address (op, SImode, 0);
|
lo_half[num] = adjust_address (op, SImode, 0);
|
hi_half[num] = adjust_address (op, SImode, 4);
|
hi_half[num] = adjust_address (op, SImode, 4);
|
}
|
}
|
else
|
else
|
{
|
{
|
lo_half[num] = simplify_gen_subreg (SImode, op,
|
lo_half[num] = simplify_gen_subreg (SImode, op,
|
GET_MODE (op) == VOIDmode
|
GET_MODE (op) == VOIDmode
|
? DImode : GET_MODE (op), 0);
|
? DImode : GET_MODE (op), 0);
|
hi_half[num] = simplify_gen_subreg (SImode, op,
|
hi_half[num] = simplify_gen_subreg (SImode, op,
|
GET_MODE (op) == VOIDmode
|
GET_MODE (op) == VOIDmode
|
? DImode : GET_MODE (op), 4);
|
? DImode : GET_MODE (op), 4);
|
}
|
}
|
}
|
}
|
}
|
}
|
|
|
bool
|
bool
|
bfin_longcall_p (rtx op, int call_cookie)
|
bfin_longcall_p (rtx op, int call_cookie)
|
{
|
{
|
gcc_assert (GET_CODE (op) == SYMBOL_REF);
|
gcc_assert (GET_CODE (op) == SYMBOL_REF);
|
if (call_cookie & CALL_SHORT)
|
if (call_cookie & CALL_SHORT)
|
return 0;
|
return 0;
|
if (call_cookie & CALL_LONG)
|
if (call_cookie & CALL_LONG)
|
return 1;
|
return 1;
|
if (TARGET_LONG_CALLS)
|
if (TARGET_LONG_CALLS)
|
return 1;
|
return 1;
|
return 0;
|
return 0;
|
}
|
}
|
|
|
/* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
|
/* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
|
COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
|
COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
|
SIBCALL is nonzero if this is a sibling call. */
|
SIBCALL is nonzero if this is a sibling call. */
|
|
|
void
|
void
|
bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
|
bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
|
{
|
{
|
rtx use = NULL, call;
|
rtx use = NULL, call;
|
rtx callee = XEXP (fnaddr, 0);
|
rtx callee = XEXP (fnaddr, 0);
|
int nelts = 2 + !!sibcall;
|
int nelts = 2 + !!sibcall;
|
rtx pat;
|
rtx pat;
|
rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
|
rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
|
int n;
|
int n;
|
|
|
/* In an untyped call, we can get NULL for operand 2. */
|
/* In an untyped call, we can get NULL for operand 2. */
|
if (cookie == NULL_RTX)
|
if (cookie == NULL_RTX)
|
cookie = const0_rtx;
|
cookie = const0_rtx;
|
|
|
/* Static functions and indirect calls don't need the pic register. */
|
/* Static functions and indirect calls don't need the pic register. */
|
if (!TARGET_FDPIC && flag_pic
|
if (!TARGET_FDPIC && flag_pic
|
&& GET_CODE (callee) == SYMBOL_REF
|
&& GET_CODE (callee) == SYMBOL_REF
|
&& !SYMBOL_REF_LOCAL_P (callee))
|
&& !SYMBOL_REF_LOCAL_P (callee))
|
use_reg (&use, pic_offset_table_rtx);
|
use_reg (&use, pic_offset_table_rtx);
|
|
|
if (TARGET_FDPIC)
|
if (TARGET_FDPIC)
|
{
|
{
|
if (GET_CODE (callee) != SYMBOL_REF
|
if (GET_CODE (callee) != SYMBOL_REF
|
|| bfin_longcall_p (callee, INTVAL (cookie)))
|
|| bfin_longcall_p (callee, INTVAL (cookie)))
|
{
|
{
|
rtx addr = callee;
|
rtx addr = callee;
|
if (! address_operand (addr, Pmode))
|
if (! address_operand (addr, Pmode))
|
addr = force_reg (Pmode, addr);
|
addr = force_reg (Pmode, addr);
|
|
|
fnaddr = gen_reg_rtx (SImode);
|
fnaddr = gen_reg_rtx (SImode);
|
emit_insn (gen_load_funcdescsi (fnaddr, addr));
|
emit_insn (gen_load_funcdescsi (fnaddr, addr));
|
fnaddr = gen_rtx_MEM (Pmode, fnaddr);
|
fnaddr = gen_rtx_MEM (Pmode, fnaddr);
|
|
|
picreg = gen_reg_rtx (SImode);
|
picreg = gen_reg_rtx (SImode);
|
emit_insn (gen_load_funcdescsi (picreg,
|
emit_insn (gen_load_funcdescsi (picreg,
|
plus_constant (addr, 4)));
|
plus_constant (addr, 4)));
|
}
|
}
|
|
|
nelts++;
|
nelts++;
|
}
|
}
|
else if ((!register_no_elim_operand (callee, Pmode)
|
else if ((!register_no_elim_operand (callee, Pmode)
|
&& GET_CODE (callee) != SYMBOL_REF)
|
&& GET_CODE (callee) != SYMBOL_REF)
|
|| (GET_CODE (callee) == SYMBOL_REF
|
|| (GET_CODE (callee) == SYMBOL_REF
|
&& (flag_pic
|
&& (flag_pic
|
|| bfin_longcall_p (callee, INTVAL (cookie)))))
|
|| bfin_longcall_p (callee, INTVAL (cookie)))))
|
{
|
{
|
callee = copy_to_mode_reg (Pmode, callee);
|
callee = copy_to_mode_reg (Pmode, callee);
|
fnaddr = gen_rtx_MEM (Pmode, callee);
|
fnaddr = gen_rtx_MEM (Pmode, callee);
|
}
|
}
|
call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);
|
call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);
|
|
|
if (retval)
|
if (retval)
|
call = gen_rtx_SET (VOIDmode, retval, call);
|
call = gen_rtx_SET (VOIDmode, retval, call);
|
|
|
pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
|
pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
|
n = 0;
|
n = 0;
|
XVECEXP (pat, 0, n++) = call;
|
XVECEXP (pat, 0, n++) = call;
|
if (TARGET_FDPIC)
|
if (TARGET_FDPIC)
|
XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
|
XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
|
XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
|
XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
|
if (sibcall)
|
if (sibcall)
|
XVECEXP (pat, 0, n++) = gen_rtx_RETURN (VOIDmode);
|
XVECEXP (pat, 0, n++) = gen_rtx_RETURN (VOIDmode);
|
call = emit_call_insn (pat);
|
call = emit_call_insn (pat);
|
if (use)
|
if (use)
|
CALL_INSN_FUNCTION_USAGE (call) = use;
|
CALL_INSN_FUNCTION_USAGE (call) = use;
|
}
|
}
|
|
|
/* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
|
/* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
|
|
|
int
|
int
|
hard_regno_mode_ok (int regno, enum machine_mode mode)
|
hard_regno_mode_ok (int regno, enum machine_mode mode)
|
{
|
{
|
/* Allow only dregs to store value of mode HI or QI */
|
/* Allow only dregs to store value of mode HI or QI */
|
enum reg_class class = REGNO_REG_CLASS (regno);
|
enum reg_class class = REGNO_REG_CLASS (regno);
|
|
|
if (mode == CCmode)
|
if (mode == CCmode)
|
return 0;
|
return 0;
|
|
|
if (mode == V2HImode)
|
if (mode == V2HImode)
|
return D_REGNO_P (regno);
|
return D_REGNO_P (regno);
|
if (class == CCREGS)
|
if (class == CCREGS)
|
return mode == BImode;
|
return mode == BImode;
|
if (mode == PDImode || mode == V2PDImode)
|
if (mode == PDImode || mode == V2PDImode)
|
return regno == REG_A0 || regno == REG_A1;
|
return regno == REG_A0 || regno == REG_A1;
|
if (mode == SImode
|
if (mode == SImode
|
&& TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
|
&& TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
|
return 1;
|
return 1;
|
|
|
return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
|
return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
|
}
|
}
|
|
|
/* Implements target hook vector_mode_supported_p. */
|
/* Implements target hook vector_mode_supported_p. */
|
|
|
static bool
|
static bool
|
bfin_vector_mode_supported_p (enum machine_mode mode)
|
bfin_vector_mode_supported_p (enum machine_mode mode)
|
{
|
{
|
return mode == V2HImode;
|
return mode == V2HImode;
|
}
|
}
|
|
|
/* Return the cost of moving data from a register in class CLASS1 to
|
/* Return the cost of moving data from a register in class CLASS1 to
|
one in class CLASS2. A cost of 2 is the default. */
|
one in class CLASS2. A cost of 2 is the default. */
|
|
|
int
|
int
|
bfin_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
|
bfin_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
|
enum reg_class class1, enum reg_class class2)
|
enum reg_class class1, enum reg_class class2)
|
{
|
{
|
/* These need secondary reloads, so they're more expensive. */
|
/* These need secondary reloads, so they're more expensive. */
|
if ((class1 == CCREGS && class2 != DREGS)
|
if ((class1 == CCREGS && class2 != DREGS)
|
|| (class1 != DREGS && class2 == CCREGS))
|
|| (class1 != DREGS && class2 == CCREGS))
|
return 4;
|
return 4;
|
|
|
/* If optimizing for size, always prefer reg-reg over reg-memory moves. */
|
/* If optimizing for size, always prefer reg-reg over reg-memory moves. */
|
if (optimize_size)
|
if (optimize_size)
|
return 2;
|
return 2;
|
|
|
/* There are some stalls involved when moving from a DREG to a different
|
/* There are some stalls involved when moving from a DREG to a different
|
class reg, and using the value in one of the following instructions.
|
class reg, and using the value in one of the following instructions.
|
Attempt to model this by slightly discouraging such moves. */
|
Attempt to model this by slightly discouraging such moves. */
|
if (class1 == DREGS && class2 != DREGS)
|
if (class1 == DREGS && class2 != DREGS)
|
return 2 * 2;
|
return 2 * 2;
|
|
|
return 2;
|
return 2;
|
}
|
}
|
|
|
/* Return the cost of moving data of mode M between a
|
/* Return the cost of moving data of mode M between a
|
register and memory. A value of 2 is the default; this cost is
|
register and memory. A value of 2 is the default; this cost is
|
relative to those in `REGISTER_MOVE_COST'.
|
relative to those in `REGISTER_MOVE_COST'.
|
|
|
??? In theory L1 memory has single-cycle latency. We should add a switch
|
??? In theory L1 memory has single-cycle latency. We should add a switch
|
that tells the compiler whether we expect to use only L1 memory for the
|
that tells the compiler whether we expect to use only L1 memory for the
|
program; it'll make the costs more accurate. */
|
program; it'll make the costs more accurate. */
|
|
|
int
|
int
|
bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
|
bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
|
enum reg_class class,
|
enum reg_class class,
|
int in ATTRIBUTE_UNUSED)
|
int in ATTRIBUTE_UNUSED)
|
{
|
{
|
/* Make memory accesses slightly more expensive than any register-register
|
/* Make memory accesses slightly more expensive than any register-register
|
move. Also, penalize non-DP registers, since they need secondary
|
move. Also, penalize non-DP registers, since they need secondary
|
reloads to load and store. */
|
reloads to load and store. */
|
if (! reg_class_subset_p (class, DPREGS))
|
if (! reg_class_subset_p (class, DPREGS))
|
return 10;
|
return 10;
|
|
|
return 8;
|
return 8;
|
}
|
}
|
|
|
/* Inform reload about cases where moving X with a mode MODE to a register in
|
/* Inform reload about cases where moving X with a mode MODE to a register in
|
CLASS requires an extra scratch register. Return the class needed for the
|
CLASS requires an extra scratch register. Return the class needed for the
|
scratch register. */
|
scratch register. */
|
|
|
static enum reg_class
bfin_secondary_reload (bool in_p, rtx x, enum reg_class class,
		       enum machine_mode mode, secondary_reload_info *sri)
{
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);

  /* Look through a SUBREG at the underlying object.  */
  if (code == SUBREG)
    x = SUBREG_REG (x), code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      /* Map a pseudo to its assigned hard register, if any.  */
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = reg_renumber[regno];

      /* An unallocated pseudo lives in a stack slot, so treat it as
	 memory from here on.  */
      if (regno == -1)
	code = MEM;
      else
	x_class = REGNO_REG_CLASS (regno);
    }

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
    {
      rtx op2 = XEXP (x, 1);
      int large_constant_p = ! CONST_7BIT_IMM_P (INTVAL (op2));

      /* A PREG destination can always absorb the addition directly.  */
      if (class == PREGS || class == PREGS_CLOBBERED)
	return NO_REGS;
      /* If destination is a DREG, we can do this without a scratch register
	 if the constant is valid for an add instruction.  */
      if ((class == DREGS || class == DPREGS)
	  && ! large_constant_p)
	return NO_REGS;
      /* Reloading to anything other than a DREG?  Use a PREG scratch
	 register.  The reload_insi pattern supplies it; hence we return
	 NO_REGS but set sri->icode.  */
      sri->icode = CODE_FOR_reload_insi;
      return NO_REGS;
    }

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS)
    return class == DREGS || class == AREGS ? NO_REGS : DREGS;

  if (class == AREGS)
    {
      if (x != const0_rtx && x_class != DREGS)
	return DREGS;
      else
	return NO_REGS;
    }

  /* CCREGS can only be moved from/to DREGS.  */
  if (class == CCREGS && x_class != DREGS)
    return DREGS;
  if (x_class == CCREGS && class != DREGS)
    return DREGS;

  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  Loads from memory into a non-DP class go
     through a DP-class scratch.  */
  if (code == MEM)
    if (! reg_class_subset_p (class, default_class))
      return default_class;
  return NO_REGS;
}
|
|
|
/* Implement TARGET_HANDLE_OPTION. */
|
/* Implement TARGET_HANDLE_OPTION. */
|
|
|
static bool
|
static bool
|
bfin_handle_option (size_t code, const char *arg, int value)
|
bfin_handle_option (size_t code, const char *arg, int value)
|
{
|
{
|
switch (code)
|
switch (code)
|
{
|
{
|
case OPT_mshared_library_id_:
|
case OPT_mshared_library_id_:
|
if (value > MAX_LIBRARY_ID)
|
if (value > MAX_LIBRARY_ID)
|
error ("-mshared-library-id=%s is not between 0 and %d",
|
error ("-mshared-library-id=%s is not between 0 and %d",
|
arg, MAX_LIBRARY_ID);
|
arg, MAX_LIBRARY_ID);
|
bfin_lib_id_given = 1;
|
bfin_lib_id_given = 1;
|
return true;
|
return true;
|
|
|
default:
|
default:
|
return true;
|
return true;
|
}
|
}
|
}
|
}
|
|
|
static struct machine_function *
|
static struct machine_function *
|
bfin_init_machine_status (void)
|
bfin_init_machine_status (void)
|
{
|
{
|
struct machine_function *f;
|
struct machine_function *f;
|
|
|
f = ggc_alloc_cleared (sizeof (struct machine_function));
|
f = ggc_alloc_cleared (sizeof (struct machine_function));
|
|
|
return f;
|
return f;
|
}
|
}
|
|
|
/* Implement the macro OVERRIDE_OPTIONS. */
|
/* Implement the macro OVERRIDE_OPTIONS. */
|
|
|
void
override_options (void)
{
  /* -momit-leaf-frame-pointer implies omitting the frame pointer.  */
  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;

  /* Library identification */
  if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
    error ("-mshared-library-id= specified without -mid-shared-library");

  /* ID shared libraries require position-independent code.  */
  if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
    flag_pic = 1;

  /* The two shared-library schemes are mutually exclusive.  */
  if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
    error ("ID shared libraries and FD-PIC mode can't be used together.");

  /* There is no single unaligned SI op for PIC code.  Sometimes we
     need to use ".4byte" and sometimes we need to use ".picptr".
     See bfin_assemble_integer for details.  */
  if (TARGET_FDPIC)
    targetm.asm_out.unaligned_op.si = 0;

  /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
     since we don't support it and it'll just break.  */
  if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
    flag_pic = 0;

  /* NOTE(review): instruction scheduling is disabled unconditionally;
     presumably it does not help (or misbehaves) on this port -- confirm
     before re-enabling.  */
  flag_schedule_insns = 0;

  init_machine_status = bfin_init_machine_status;
}
|
|
|
/* Return the destination address of BRANCH.
|
/* Return the destination address of BRANCH.
|
We need to use this instead of get_attr_length, because the
|
We need to use this instead of get_attr_length, because the
|
cbranch_with_nops pattern conservatively sets its length to 6, and
|
cbranch_with_nops pattern conservatively sets its length to 6, and
|
we still prefer to use shorter sequences. */
|
we still prefer to use shorter sequences. */
|
|
|
static int
branch_dest (rtx branch)
{
  rtx dest;
  int dest_uid;
  rtx pat = PATTERN (branch);
  /* A branch wrapped in a PARALLEL keeps its SET as element 0.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  dest = SET_SRC (pat);
  /* A conditional branch hides the label in the "then" arm of an
     IF_THEN_ELSE; an unconditional jump has the LABEL_REF directly.  */
  if (GET_CODE (dest) == IF_THEN_ELSE)
    dest = XEXP (dest, 1);
  /* Strip the LABEL_REF to reach the CODE_LABEL insn itself.  */
  dest = XEXP (dest, 0);
  dest_uid = INSN_UID (dest);
  /* Return the label's recorded byte address.  */
  return INSN_ADDRESSES (dest_uid);
}
|
|
|
/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
|
/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
|
it's a branch that's predicted taken. */
|
it's a branch that's predicted taken. */
|
|
|
static int
|
static int
|
cbranch_predicted_taken_p (rtx insn)
|
cbranch_predicted_taken_p (rtx insn)
|
{
|
{
|
rtx x = find_reg_note (insn, REG_BR_PROB, 0);
|
rtx x = find_reg_note (insn, REG_BR_PROB, 0);
|
|
|
if (x)
|
if (x)
|
{
|
{
|
int pred_val = INTVAL (XEXP (x, 0));
|
int pred_val = INTVAL (XEXP (x, 0));
|
|
|
return pred_val >= REG_BR_PROB_BASE / 2;
|
return pred_val >= REG_BR_PROB_BASE / 2;
|
}
|
}
|
|
|
return 0;
|
return 0;
|
}
|
}
|
|
|
/* Templates for use by asm_conditional_branch. */
|
/* Templates for use by asm_conditional_branch. */
|
|
|
/* Rows are selected by a 2-bit index built in asm_conditional_branch:
   bit 0 chooses branch-on-false vs branch-on-true, bit 1 is set when the
   branch is predicted taken (bp).  Columns select by distance: [0] a
   single short conditional jump, [1] an inverted test around jump.s,
   [2] an inverted test around jump.l.  */
static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;", "if cc jump 4 (bp); jump.s %3;", "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;", "if !cc jump 4 (bp); jump.s %3;", "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);", "if cc jump 4; jump.s %3;", "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;", "if !cc jump 6; jump.l %3;" },
};
|
|
|
/* Output INSN, which is a conditional branch instruction with operands
|
/* Output INSN, which is a conditional branch instruction with operands
|
OPERANDS.
|
OPERANDS.
|
|
|
We deal with the various forms of conditional branches that can be generated
|
We deal with the various forms of conditional branches that can be generated
|
by bfin_reorg to prevent the hardware from doing speculative loads, by
|
by bfin_reorg to prevent the hardware from doing speculative loads, by
|
- emitting a sufficient number of nops, if N_NOPS is nonzero, or
|
- emitting a sufficient number of nops, if N_NOPS is nonzero, or
|
- always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
|
- always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
|
Either of these is only necessary if the branch is short, otherwise the
|
Either of these is only necessary if the branch is short, otherwise the
|
template we use ends in an unconditional jump which flushes the pipeline
|
template we use ends in an unconditional jump which flushes the pipeline
|
anyway. */
|
anyway. */
|
|
|
void
asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
{
  /* Byte distance from this branch to its destination label.  */
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note : offset for instructions like if cc jmp; jump.[sl] offset
     is to be taken from start of if cc rather than jump.
     Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
  */
  /* Template column: 0 = short conditional jump, 1 = inverted test
     around jump.s, 2 = inverted test around jump.l.  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
	     : offset >= -4094 && offset <= 4096 ? 1
	     : 2);
  /* Force "predicted taken" only for the short form; otherwise honor
     the REG_BR_PROB note on the insn.  */
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  /* Row index into ccbranch_templates: (bp << 1) | BRF-or-BRT, where
     BRF/BRT are constants defined elsewhere in this file.  */
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  /* Nops are only ever requested for branches not predicted taken.  */
  gcc_assert (n_nops == 0 || !bp);
  /* Only the short form needs padding nops; the longer templates end in
     an unconditional jump that flushes the pipeline anyway.  */
  if (len == 0)
    while (n_nops-- > 0)
      output_asm_insn ("nop;", NULL);
}
|
|
|
/* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
|
/* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
|
stored in bfin_compare_op0 and bfin_compare_op1 already. */
|
stored in bfin_compare_op0 and bfin_compare_op1 already. */
|
|
|
rtx
bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code1, code2;
  rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
  rtx tem = bfin_cc_rtx;
  enum rtx_code code = GET_CODE (cmp);

  /* If we have a BImode input, then we already have a compare result, and
     do not need to emit another comparison.  */
  if (GET_MODE (op0) == BImode)
    {
      gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
      tem = op0, code2 = code;
    }
  else
    {
      switch (code) {
      /* bfin has these conditions */
      case EQ:
      case LT:
      case LE:
      case LEU:
      case LTU:
	/* Directly supported: compute the condition into CC and test
	   CC != 0.  */
	code1 = code;
	code2 = NE;
	break;
      default:
	/* Unsupported condition: compute the reversed condition into CC
	   and test CC == 0 instead.  */
	code1 = reverse_condition (code);
	code2 = EQ;
	break;
      }
      emit_insn (gen_rtx_SET (BImode, tem,
			      gen_rtx_fmt_ee (code1, BImode, op0, op1)));
    }

  /* Hand back the BImode test the caller should branch on.  */
  return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
}
|
|
|
/* Return nonzero iff C has exactly one bit set if it is interpreted
|
/* Return nonzero iff C has exactly one bit set if it is interpreted
|
as a 32 bit constant. */
|
as a 32 bit constant. */
|
|
|
int
|
int
|
log2constp (unsigned HOST_WIDE_INT c)
|
log2constp (unsigned HOST_WIDE_INT c)
|
{
|
{
|
c &= 0xFFFFFFFF;
|
c &= 0xFFFFFFFF;
|
return c != 0 && (c & (c-1)) == 0;
|
return c != 0 && (c & (c-1)) == 0;
|
}
|
}
|
|
|
/* Returns the number of consecutive least significant zeros in the binary
|
/* Returns the number of consecutive least significant zeros in the binary
|
representation of *V.
|
representation of *V.
|
We modify *V to contain the original value arithmetically shifted right by
|
We modify *V to contain the original value arithmetically shifted right by
|
the number of zeroes. */
|
the number of zeroes. */
|
|
|
static int
shiftr_zero (HOST_WIDE_INT *v)
{
  unsigned HOST_WIDE_INT tmp = *v;
  unsigned HOST_WIDE_INT sgn;
  int n = 0;

  if (tmp == 0)
    return 0;

  /* Save the sign bit so the loop below can emulate an arithmetic
     right shift using unsigned (logical) shifts.  */
  sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
  while ((tmp & 0x1) == 0 && n <= 32)
    {
      /* Logical shift right by one, then re-insert the original sign
	 bit to keep the value sign-extended.  */
      tmp = (tmp >> 1) | sgn;
      n++;
    }
  *v = tmp;
  return n;
}
|
|
|
/* After reload, split the load of an immediate constant. OPERANDS are the
|
/* After reload, split the load of an immediate constant. OPERANDS are the
|
operands of the movsi_insn pattern which we are splitting. We return
|
operands of the movsi_insn pattern which we are splitting. We return
|
nonzero if we emitted a sequence to load the constant, zero if we emitted
|
nonzero if we emitted a sequence to load the constant, zero if we emitted
|
nothing because we want to use the splitter's default sequence. */
|
nothing because we want to use the splitter's default sequence. */
|
|
|
int
|
int
|
split_load_immediate (rtx operands[])
|
split_load_immediate (rtx operands[])
|
{
|
{
|
HOST_WIDE_INT val = INTVAL (operands[1]);
|
HOST_WIDE_INT val = INTVAL (operands[1]);
|
HOST_WIDE_INT tmp;
|
HOST_WIDE_INT tmp;
|
HOST_WIDE_INT shifted = val;
|
HOST_WIDE_INT shifted = val;
|
HOST_WIDE_INT shifted_compl = ~val;
|
HOST_WIDE_INT shifted_compl = ~val;
|
int num_zero = shiftr_zero (&shifted);
|
int num_zero = shiftr_zero (&shifted);
|
int num_compl_zero = shiftr_zero (&shifted_compl);
|
int num_compl_zero = shiftr_zero (&shifted_compl);
|
unsigned int regno = REGNO (operands[0]);
|
unsigned int regno = REGNO (operands[0]);
|
enum reg_class class1 = REGNO_REG_CLASS (regno);
|
enum reg_class class1 = REGNO_REG_CLASS (regno);
|
|
|
/* This case takes care of single-bit set/clear constants, which we could
|
/* This case takes care of single-bit set/clear constants, which we could
|
also implement with BITSET/BITCLR. */
|
also implement with BITSET/BITCLR. */
|
if (num_zero
|
if (num_zero
|
&& shifted >= -32768 && shifted < 65536
|
&& shifted >= -32768 && shifted < 65536
|
&& (D_REGNO_P (regno)
|
&& (D_REGNO_P (regno)
|
|| (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
|
|| (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
|
{
|
{
|
emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
|
emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
|
emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
|
emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
|
return 1;
|
return 1;
|
}
|
}
|
|
|
tmp = val & 0xFFFF;
|
tmp = val & 0xFFFF;
|
tmp |= -(tmp & 0x8000);
|
tmp |= -(tmp & 0x8000);
|
|
|
/* If high word has one bit set or clear, try to use a bit operation. */
|
/* If high word has one bit set or clear, try to use a bit operation. */
|
if (D_REGNO_P (regno))
|
if (D_REGNO_P (regno))
|
{
|
{
|
if (log2constp (val & 0xFFFF0000))
|
if (log2constp (val & 0xFFFF0000))
|
{
|
{
|
emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
|
emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
|
emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
|
emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
|
return 1;
|
return 1;
|
}
|
}
|
else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
|
else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
|
{
|
{
|
emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
|
emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
|
emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
|
emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
|
}
|
}
|
}
|
}
|
|
|
if (D_REGNO_P (regno))
|
if (D_REGNO_P (regno))
|
{
|
{
|
if (CONST_7BIT_IMM_P (tmp))
|
if (CONST_7BIT_IMM_P (tmp))
|
{
|
{
|
emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
|
emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
|
emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
|
emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
|
return 1;
|
return 1;
|
}
|
}
|
|
|
if ((val & 0xFFFF0000) == 0)
|
if ((val & 0xFFFF0000) == 0)
|
{
|
{
|
emit_insn (gen_movsi (operands[0], const0_rtx));
|
emit_insn (gen_movsi (operands[0], const0_rtx));
|
emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
|
emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
|
return 1;
|
return 1;
|
}
|
}
|
|
|
if ((val & 0xFFFF0000) == 0xFFFF0000)
|
if ((val & 0xFFFF0000) == 0xFFFF0000)
|
{
|
{
|
emit_insn (gen_movsi (operands[0], constm1_rtx));
|
emit_insn (gen_movsi (operands[0], constm1_rtx));
|
emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
|
emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
|
return 1;
|
return 1;
|
}
|
}
|
}
|
}
|
|
|
/* Need DREGs for the remaining case. */
|
/* Need DREGs for the remaining case. */
|
if (regno > REG_R7)
|
if (regno > REG_R7)
|
return 0;
|
return 0;
|
|
|
if (optimize_size
|
if (optimize_size
|
&& num_compl_zero && CONST_7BIT_IMM_P (shifted_compl))
|
&& num_compl_zero && CONST_7BIT_IMM_P (shifted_compl))
|
{
|
{
|
/* If optimizing for size, generate a sequence that has more instructions
|
/* If optimizing for size, generate a sequence that has more instructions
|
but is shorter. */
|
but is shorter. */
|
emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
|
emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
|
emit_insn (gen_ashlsi3 (operands[0], operands[0],
|
emit_insn (gen_ashlsi3 (operands[0], operands[0],
|
GEN_INT (num_compl_zero)));
|
GEN_INT (num_compl_zero)));
|
emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
|
emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
|
return 1;
|
return 1;
|
}
|
}
|
return 0;
|
return 0;
|
}
|
}
|
|
|
/* Return true if the legitimate memory address for a memory operand of mode
|
/* Return true if the legitimate memory address for a memory operand of mode
|
MODE. Return false if not. */
|
MODE. Return false if not. */
|
|
|
static bool
|
static bool
|
bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
|
bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
|
{
|
{
|
unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
|
unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
|
int sz = GET_MODE_SIZE (mode);
|
int sz = GET_MODE_SIZE (mode);
|
int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
|
int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
|
/* The usual offsettable_memref machinery doesn't work so well for this
|
/* The usual offsettable_memref machinery doesn't work so well for this
|
port, so we deal with the problem here. */
|
port, so we deal with the problem here. */
|
unsigned HOST_WIDE_INT mask = sz == 8 ? 0x7ffe : 0x7fff;
|
unsigned HOST_WIDE_INT mask = sz == 8 ? 0x7ffe : 0x7fff;
|
return (v & ~(mask << shift)) == 0;
|
return (v & ~(mask << shift)) == 0;
|
}
|
}
|
|
|
static bool
|
static bool
|
bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
|
bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
|
enum rtx_code outer_code)
|
enum rtx_code outer_code)
|
{
|
{
|
if (strict)
|
if (strict)
|
return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
|
return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
|
else
|
else
|
return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
|
return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
|
}
|
}
|
|
|
bool
bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
{
  switch (GET_CODE (x)) {
  case REG:
    /* A plain base register.  */
    if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
      return true;
    break;
  case PLUS:
    /* Base register plus either an SImode UNSPEC or a constant offset
       that fits the addressing range for MODE.  */
    if (REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
	&& ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
	    || (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
      return true;
    break;
  case POST_INC:
  case POST_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
      return true;
    /* NOTE(review): there is no break here, so a POST_INC/POST_DEC that
       fails the test above is also checked against the PRE_DEC
       conditions below (which additionally require the stack pointer).
       Confirm whether this fall-through is intentional.  */
  case PRE_DEC:
    /* Pre-decrement is only recognized on the stack pointer.  */
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& XEXP (x, 0) == stack_pointer_rtx
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
      return true;
    break;
  default:
    break;
  }
  return false;
}
|
|
|
static bool
bfin_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  /* Compute the cost of rtx X (whose code is CODE) appearing inside an
     expression of code OUTER_CODE; store the result in *TOTAL.  Return
     true when *TOTAL is final, false to let the caller recurse into
     operands.  Implements the RTX_COSTS target hook for Blackfin.  */

  /* Cost of one ordinary instruction; used for "needs an extra insn".  */
  int cost2 = COSTS_N_INSNS (1);

  switch (code)
    {
    case CONST_INT:
      /* A constant is free exactly when the containing operation can
	 encode it as an immediate; otherwise it costs one insn.  */
      if (outer_code == SET || outer_code == PLUS)
	*total = CONST_7BIT_IMM_P (INTVAL (x)) ? 0 : cost2;
      else if (outer_code == AND)
	/* AND with an inverted power-of-two mask is a bit-clear insn.  */
	*total = log2constp (~INTVAL (x)) ? 0 : cost2;
      else if (outer_code == LE || outer_code == LT || outer_code == EQ)
	*total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
      else if (outer_code == LEU || outer_code == LTU)
	*total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
      else if (outer_code == MULT)
	/* Multiplies by 2 or 4 can be done as addressing/shift forms.  */
	*total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
      else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
	*total = 0;
      else if (outer_code == ASHIFT || outer_code == ASHIFTRT
	       || outer_code == LSHIFTRT)
	*total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
      else if (outer_code == IOR || outer_code == XOR)
	/* Single-bit constants map to bit-set/bit-toggle insns.  */
	*total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
      else
	*total = cost2;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      /* Symbolic and wide constants need a two-insn load.  */
      *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      if (GET_MODE (x) == Pmode)
	{
	  /* reg + reg * {2,4} is a single scaled-index style add.  */
	  if (GET_CODE (XEXP (x, 0)) == MULT
	      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
	    {
	      HOST_WIDE_INT val = INTVAL (XEXP (XEXP (x, 0), 1));
	      if (val == 2 || val == 4)
		{
		  *total = cost2;
		  *total += rtx_cost (XEXP (XEXP (x, 0), 0), outer_code);
		  *total += rtx_cost (XEXP (x, 1), outer_code);
		  return true;
		}
	    }
	}

      /* fall through */

    case MINUS:
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      /* Double-word arithmetic and shifts are expanded to sequences.
	 Note *TOTAL is left untouched for non-DImode here; the caller's
	 default applies since we return false.  */
      if (GET_MODE (x) == DImode)
	*total = 6 * cost2;
      return false;

    case AND:
    case IOR:
    case XOR:
      /* DImode logical ops take one insn per word.  */
      if (GET_MODE (x) == DImode)
	*total = 2 * cost2;
      return false;

    case MULT:
      if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD)
	*total = COSTS_N_INSNS (3);
      return false;

    case UDIV:
    case UMOD:
      /* No hardware divide; modeled as a long library-style sequence.  */
      *total = COSTS_N_INSNS (32);
      return true;

    case VEC_CONCAT:
    case VEC_SELECT:
      /* NOTE(review): when OUTER_CODE != SET, *TOTAL is returned
	 unmodified even though we return true — confirm callers always
	 initialize *TOTAL beforehand.  */
      if (outer_code == SET)
	*total = cost2;
      return true;

    default:
      return false;
    }
}
|
|
|
static void
bfin_internal_label (FILE *stream, const char *prefix, unsigned long num)
{
  /* Emit an internal (compiler-generated) label of the form
     <LOCAL_LABEL_PREFIX><PREFIX>$<NUM>: to the assembly STREAM.  */
  fprintf (stream, "%s%s$%ld:\n", LOCAL_LABEL_PREFIX, prefix, num);
}
|
|
|
/* Used for communication between {push,pop}_multiple_operation (which
   we use not only as a predicate) and the corresponding output functions.
   They record the lowest-numbered D register (R-file) and P register
   saved/restored by the multi-register push/pop just validated.  */
static int first_preg_to_save, first_dreg_to_save;

int
push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* Return 1 if OP (a PARALLEL) matches the shape of a multi-register
     push: a descending run of D registers (R7..Rn) optionally followed
     by a descending run of P registers (P5..Pn), each stored at
     SP - i*4.  As a side effect, set first_{d,p}reg_to_save.  */

  /* Defaults meaning "no register of that class saved": one past the
     highest usable D (8) / P (6) index.  */
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  first_preg_to_save = lastpreg;
  first_dreg_to_save = lastdreg;

  /* Elements 0 and XVECLEN-1 of the PARALLEL are skipped — presumably
     the SP adjustment and companion clobber/use; TODO confirm against
     the push_multiple pattern in bfin.md.  GROUP is a small state
     machine: 0 = nothing seen, 1 = inside the D-register run,
     2 = inside the P-register run.  */
  for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      /* Each element must be a register store to memory.  */
      if (GET_CODE (dest) != MEM || ! REG_P (src))
	return 0;
      dest = XEXP (dest, 0);
      /* The address must be exactly SP + (-i*4): slot i of the push.  */
      if (GET_CODE (dest) != PLUS
	  || ! REG_P (XEXP (dest, 0))
	  || REGNO (XEXP (dest, 0)) != REG_SP
	  || GET_CODE (XEXP (dest, 1)) != CONST_INT
	  || INTVAL (XEXP (dest, 1)) != -i * 4)
	return 0;

      regno = REGNO (src);
      if (group == 0)
	{
	  /* First register decides which run we start in.  */
	  if (D_REGNO_P (regno))
	    {
	      group = 1;
	      first_dreg_to_save = lastdreg = regno - REG_R0;
	    }
	  else if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else
	    return 0;

	  continue;
	}

      if (group == 1)
	{
	  /* Either switch to the P-register run, or continue the
	     D run with the next consecutive register.  */
	  if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else if (regno != REG_R0 + lastdreg + 1)
	    return 0;
	  else
	    lastdreg++;
	}
      else if (group == 2)
	{
	  /* Within the P run only consecutive registers are allowed.  */
	  if (regno != REG_P0 + lastpreg + 1)
	    return 0;
	  lastpreg++;
	}
    }
  return 1;
}
|
|
|
int
pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* Return 1 if OP (a PARALLEL) matches a multi-register pop: loads
     from ascending SP offsets, restoring first P registers downward
     from P5 and then D registers downward from R7 (the reverse of
     push_multiple_operation).  Sets first_{d,p}reg_to_save.  */
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  /* Element 0 is skipped — presumably the SP adjustment; TODO confirm
     against the pop_multiple pattern in bfin.md.  GROUP: 0 = in the
     P-register run (or nothing yet), 1 = in the D-register run.  */
  for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      /* Each element must be a load from memory into a register.  */
      if (GET_CODE (src) != MEM || ! REG_P (dest))
	return 0;
      src = XEXP (src, 0);

      if (i == 1)
	{
	  /* First slot is at SP itself.  */
	  if (! REG_P (src) || REGNO (src) != REG_SP)
	    return 0;
	}
      /* Subsequent slots must be exactly SP + (i-1)*4.  */
      else if (GET_CODE (src) != PLUS
	       || ! REG_P (XEXP (src, 0))
	       || REGNO (XEXP (src, 0)) != REG_SP
	       || GET_CODE (XEXP (src, 1)) != CONST_INT
	       || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
	return 0;

      regno = REGNO (dest);
      if (group == 0)
	{
	  /* R7 marks the switch from the P run to the D run; otherwise
	     continue descending through consecutive P registers.  */
	  if (regno == REG_R7)
	    {
	      group = 1;
	      lastdreg = 7;
	    }
	  else if (regno != REG_P0 + lastpreg - 1)
	    return 0;
	  else
	    lastpreg--;
	}
      else if (group == 1)
	{
	  /* Within the D run only descending consecutive regs allowed.  */
	  if (regno != REG_R0 + lastdreg - 1)
	    return 0;
	  else
	    lastdreg--;
	}
    }
  /* Record the lowest registers restored for the output functions.  */
  first_dreg_to_save = lastdreg;
  first_preg_to_save = lastpreg;
  return 1;
}
|
|
|
/* Emit assembly code for one multi-register push described by INSN, with
|
/* Emit assembly code for one multi-register push described by INSN, with
|
operands in OPERANDS. */
|
operands in OPERANDS. */
|
|
|
void
|
void
|
output_push_multiple (rtx insn, rtx *operands)
|
output_push_multiple (rtx insn, rtx *operands)
|
{
|
{
|
char buf[80];
|
char buf[80];
|
int ok;
|
int ok;
|
|
|
/* Validate the insn again, and compute first_[dp]reg_to_save. */
|
/* Validate the insn again, and compute first_[dp]reg_to_save. */
|
ok = push_multiple_operation (PATTERN (insn), VOIDmode);
|
ok = push_multiple_operation (PATTERN (insn), VOIDmode);
|
gcc_assert (ok);
|
gcc_assert (ok);
|
|
|
if (first_dreg_to_save == 8)
|
if (first_dreg_to_save == 8)
|
sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
|
sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
|
else if (first_preg_to_save == 6)
|
else if (first_preg_to_save == 6)
|
sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
|
sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
|
else
|
else
|
sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
|
sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
|
first_dreg_to_save, first_preg_to_save);
|
first_dreg_to_save, first_preg_to_save);
|
|
|
output_asm_insn (buf, operands);
|
output_asm_insn (buf, operands);
|
}
|
}
|
|
|
/* Emit assembly code for one multi-register pop described by INSN, with
|
/* Emit assembly code for one multi-register pop described by INSN, with
|
operands in OPERANDS. */
|
operands in OPERANDS. */
|
|
|
void
|
void
|
output_pop_multiple (rtx insn, rtx *operands)
|
output_pop_multiple (rtx insn, rtx *operands)
|
{
|
{
|
char buf[80];
|
char buf[80];
|
int ok;
|
int ok;
|
|
|
/* Validate the insn again, and compute first_[dp]reg_to_save. */
|
/* Validate the insn again, and compute first_[dp]reg_to_save. */
|
ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
|
ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
|
gcc_assert (ok);
|
gcc_assert (ok);
|
|
|
if (first_dreg_to_save == 8)
|
if (first_dreg_to_save == 8)
|
sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
|
sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
|
else if (first_preg_to_save == 6)
|
else if (first_preg_to_save == 6)
|
sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
|
sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
|
else
|
else
|
sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
|
sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
|
first_dreg_to_save, first_preg_to_save);
|
first_dreg_to_save, first_preg_to_save);
|
|
|
output_asm_insn (buf, operands);
|
output_asm_insn (buf, operands);
|
}
|
}
|
|
|
/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
|
/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
|
|
|
static void
|
static void
|
single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
|
single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
|
{
|
{
|
rtx scratch = gen_reg_rtx (mode);
|
rtx scratch = gen_reg_rtx (mode);
|
rtx srcmem, dstmem;
|
rtx srcmem, dstmem;
|
|
|
srcmem = adjust_address_nv (src, mode, offset);
|
srcmem = adjust_address_nv (src, mode, offset);
|
dstmem = adjust_address_nv (dst, mode, offset);
|
dstmem = adjust_address_nv (dst, mode, offset);
|
emit_move_insn (scratch, srcmem);
|
emit_move_insn (scratch, srcmem);
|
emit_move_insn (dstmem, scratch);
|
emit_move_insn (dstmem, scratch);
|
}
|
}
|
|
|
/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
   alignment ALIGN_EXP.  Return true if successful, false if we should fall
   back on a different method.  */

bool
bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
{
  rtx srcreg, destreg, countreg;
  /* ALIGN/COUNT stay 0 when the corresponding rtx isn't a CONST_INT,
     i.e. unknown at compile time.  */
  HOST_WIDE_INT align = 0;
  unsigned HOST_WIDE_INT count = 0;

  if (GET_CODE (align_exp) == CONST_INT)
    align = INTVAL (align_exp);
  if (GET_CODE (count_exp) == CONST_INT)
    {
      count = INTVAL (count_exp);
#if 0
      if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
	return false;
#endif
    }

  /* If optimizing for size, only do single copies inline. */
  if (optimize_size)
    {
      if (count == 2 && align < 2)
	return false;
      if (count == 4 && align < 4)
	return false;
      if (count != 1 && count != 2 && count != 4)
	return false;
    }
  /* Unaligned multi-byte copies are not handled here (count == 0 means
     unknown, which also bails out under this test).  */
  if (align < 2 && count != 1)
    return false;

  /* Force both addresses into registers so the rep_mov patterns can
     auto-increment them.  */
  destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  if (destreg != XEXP (dst, 0))
    dst = replace_equiv_address_nv (dst, destreg);
  srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
  if (srcreg != XEXP (src, 0))
    src = replace_equiv_address_nv (src, srcreg);

  if (count != 0 && align >= 2)
    {
      /* OFFSET tracks bytes already copied by single moves.  It is
	 deliberately NOT advanced across the gen_rep_mov* branches —
	 presumably those insns auto-increment destreg/srcreg themselves,
	 so later offsets stay relative to the updated registers; TODO
	 confirm against the rep_movsi/rep_movhi patterns in bfin.md.  */
      unsigned HOST_WIDE_INT offset = 0;

      if (align >= 4)
	{
	  /* Word-aligned: copy words, then an optional halfword.  */
	  if ((count & ~3) == 4)
	    {
	      /* Exactly one word: a single move beats a loop.  */
	      single_move_for_movmem (dst, src, SImode, offset);
	      offset = 4;
	    }
	  else if (count & ~3)
	    {
	      /* Loop count is (words - 1); the pattern copies one word
		 plus COUNTREG iterations.  */
	      HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
	    }
	  if (count & 2)
	    {
	      single_move_for_movmem (dst, src, HImode, offset);
	      offset += 2;
	    }
	}
      else
	{
	  /* Halfword-aligned: copy halfwords.  */
	  if ((count & ~1) == 2)
	    {
	      single_move_for_movmem (dst, src, HImode, offset);
	      offset = 2;
	    }
	  else if (count & ~1)
	    {
	      HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
	    }
	}
      /* Trailing odd byte, if any.  */
      if (count & 1)
	{
	  single_move_for_movmem (dst, src, QImode, offset);
	}
      return true;
    }
  return false;
}
|
|
|
|
|
static int
bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  /* Scheduler hook: adjust the latency COST of the dependence LINK
     between DEP_INSN (producer) and INSN (consumer).  */
  enum attr_type insn_type, dep_insn_type;
  int dep_insn_code_number;

  /* Anti and output dependencies have zero cost.  (REG_NOTE_KIND is 0
     only for true dependencies.)  */
  if (REG_NOTE_KIND (link) != 0)
    return 0;

  dep_insn_code_number = recog_memoized (dep_insn);

  /* If we can't recognize the insns, we can't really do anything. */
  if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
    return cost;

  insn_type = get_attr_type (insn);
  dep_insn_type = get_attr_type (dep_insn);

  if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
    {
      rtx pat = PATTERN (dep_insn);
      rtx dest = SET_DEST (pat);
      rtx src = SET_SRC (pat);
      /* NOTE(review): REGNO is applied to DEST and SRC without REG_P
	 checks — presumably TYPE_MOVE/TYPE_MCLD patterns guarantee both
	 are (or start with) registers; confirm against bfin.md.  */
      if (! ADDRESS_REGNO_P (REGNO (dest)) || ! D_REGNO_P (REGNO (src)))
	return cost;
      /* Penalize a D-reg -> address-reg transfer feeding this insn:
	 4 extra cycles for a move, 3 for a load.  */
      return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
    }

  return cost;
}
|
|
|
|
|
/* Increment the counter for the number of loop instructions in the
   current function. */

void
bfin_hardware_loop (void)
{
  /* Per-function count kept in the machine-specific function data.  */
  cfun->machine->has_hardware_loops++;
}
|
|
|
/* Maximum loop nesting depth.  (The hardware provides two sets of loop
   registers: LC/LT/LB 0 and 1.)  */
#define MAX_LOOP_DEPTH 2

/* Maximum size of a loop. */
#define MAX_LOOP_LENGTH 2042

/* We need to keep a vector of loops */
typedef struct loop_info *loop_info;
DEF_VEC_P (loop_info);
DEF_VEC_ALLOC_P (loop_info,heap);

/* Information about a loop we have found (or are in the process of
   finding). */
struct loop_info GTY (())
{
  /* loop number, for dumps */
  int loop_no;

  /* Predecessor block of the loop.  This is the one that falls into
     the loop and contains the initialization instruction. */
  basic_block predecessor;

  /* First block in the loop.  This is the one branched to by the loop_end
     insn. */
  basic_block head;

  /* Last block in the loop (the one with the loop_end insn). */
  basic_block tail;

  /* The successor block of the loop.  This is the one the loop_end insn
     falls into. */
  basic_block successor;

  /* The last instruction in the tail. */
  rtx last_insn;

  /* The loop_end insn. */
  rtx loop_end;

  /* The iteration register. */
  rtx iter_reg;

  /* The new initialization insn. */
  rtx init;

  /* The new initialization instruction. */
  rtx loop_init;

  /* The new label placed at the beginning of the loop. */
  rtx start_label;

  /* The new label placed at the end of the loop. */
  rtx end_label;

  /* The length of the loop. */
  int length;

  /* The nesting depth of the loop. */
  int depth;

  /* Nonzero if we can't optimize this loop. */
  int bad;

  /* True if we have visited this loop. */
  int visited;

  /* True if this loop body clobbers any of LC0, LT0, or LB0. */
  int clobber_loop0;

  /* True if this loop body clobbers any of LC1, LT1, or LB1. */
  int clobber_loop1;

  /* Next loop in the graph. */
  struct loop_info *next;

  /* Immediate outer loop of this loop. */
  struct loop_info *outer;

  /* Vector of blocks only within the loop, including those within
     inner loops. */
  VEC (basic_block,heap) *blocks;

  /* Same information in a bitmap. */
  bitmap block_bitmap;

  /* Vector of inner loops within this loop */
  VEC (loop_info,heap) *loops;
};
|
|
|
static void
|
static void
|
bfin_dump_loops (loop_info loops)
|
bfin_dump_loops (loop_info loops)
|
{
|
{
|
loop_info loop;
|
loop_info loop;
|
|
|
for (loop = loops; loop; loop = loop->next)
|
for (loop = loops; loop; loop = loop->next)
|
{
|
{
|
loop_info i;
|
loop_info i;
|
basic_block b;
|
basic_block b;
|
unsigned ix;
|
unsigned ix;
|
|
|
fprintf (dump_file, ";; loop %d: ", loop->loop_no);
|
fprintf (dump_file, ";; loop %d: ", loop->loop_no);
|
if (loop->bad)
|
if (loop->bad)
|
fprintf (dump_file, "(bad) ");
|
fprintf (dump_file, "(bad) ");
|
fprintf (dump_file, "{head:%d, depth:%d}", loop->head->index, loop->depth);
|
fprintf (dump_file, "{head:%d, depth:%d}", loop->head->index, loop->depth);
|
|
|
fprintf (dump_file, " blocks: [ ");
|
fprintf (dump_file, " blocks: [ ");
|
for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, b); ix++)
|
for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, b); ix++)
|
fprintf (dump_file, "%d ", b->index);
|
fprintf (dump_file, "%d ", b->index);
|
fprintf (dump_file, "] ");
|
fprintf (dump_file, "] ");
|
|
|
fprintf (dump_file, " inner loops: [ ");
|
fprintf (dump_file, " inner loops: [ ");
|
for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, i); ix++)
|
for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, i); ix++)
|
fprintf (dump_file, "%d ", i->loop_no);
|
fprintf (dump_file, "%d ", i->loop_no);
|
fprintf (dump_file, "]\n");
|
fprintf (dump_file, "]\n");
|
}
|
}
|
fprintf (dump_file, "\n");
|
fprintf (dump_file, "\n");
|
}
|
}
|
|
|
/* Scan the blocks of LOOP (and its inferiors) looking for basic block
   BB.  Return true, if we find it.  */

static bool
bfin_bb_in_loop (loop_info loop, basic_block bb)
{
  /* The loop's block set is mirrored in block_bitmap, so membership is
     a single bitmap lookup by block index.  */
  return bitmap_bit_p (loop->block_bitmap, bb->index);
}
|
|
|
/* Scan the blocks of LOOP (and its inferiors) looking for uses of
|
/* Scan the blocks of LOOP (and its inferiors) looking for uses of
|
REG. Return true, if we find any. Don't count the loop's loop_end
|
REG. Return true, if we find any. Don't count the loop's loop_end
|
insn if it matches LOOP_END. */
|
insn if it matches LOOP_END. */
|
|
|
static bool
|
static bool
|
bfin_scan_loop (loop_info loop, rtx reg, rtx loop_end)
|
bfin_scan_loop (loop_info loop, rtx reg, rtx loop_end)
|
{
|
{
|
unsigned ix;
|
unsigned ix;
|
basic_block bb;
|
basic_block bb;
|
|
|
for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
|
for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
|
{
|
{
|
rtx insn;
|
rtx insn;
|
|
|
for (insn = BB_HEAD (bb);
|
for (insn = BB_HEAD (bb);
|
insn != NEXT_INSN (BB_END (bb));
|
insn != NEXT_INSN (BB_END (bb));
|
insn = NEXT_INSN (insn))
|
insn = NEXT_INSN (insn))
|
{
|
{
|
if (!INSN_P (insn))
|
if (!INSN_P (insn))
|
continue;
|
continue;
|
if (insn == loop_end)
|
if (insn == loop_end)
|
continue;
|
continue;
|
if (reg_mentioned_p (reg, PATTERN (insn)))
|
if (reg_mentioned_p (reg, PATTERN (insn)))
|
return true;
|
return true;
|
}
|
}
|
}
|
}
|
return false;
|
return false;
|
}
|
}
|
|
|
/* Optimize LOOP. */
|
/* Optimize LOOP. */
|
|
|
static void
|
static void
|
bfin_optimize_loop (loop_info loop)
|
bfin_optimize_loop (loop_info loop)
|
{
|
{
|
basic_block bb;
|
basic_block bb;
|
loop_info inner;
|
loop_info inner;
|
rtx insn, init_insn, last_insn, nop_insn;
|
rtx insn, init_insn, last_insn, nop_insn;
|
rtx loop_init, start_label, end_label;
|
rtx loop_init, start_label, end_label;
|
rtx reg_lc0, reg_lc1, reg_lt0, reg_lt1, reg_lb0, reg_lb1;
|
rtx reg_lc0, reg_lc1, reg_lt0, reg_lt1, reg_lb0, reg_lb1;
|
rtx iter_reg;
|
rtx iter_reg;
|
rtx lc_reg, lt_reg, lb_reg;
|
rtx lc_reg, lt_reg, lb_reg;
|
rtx seq;
|
rtx seq;
|
int length;
|
int length;
|
unsigned ix;
|
unsigned ix;
|
int inner_depth = 0;
|
int inner_depth = 0;
|
|
|
if (loop->visited)
|
if (loop->visited)
|
return;
|
return;
|
|
|
loop->visited = 1;
|
loop->visited = 1;
|
|
|
if (loop->bad)
|
if (loop->bad)
|
{
|
{
|
if (dump_file)
|
if (dump_file)
|
fprintf (dump_file, ";; loop %d bad when found\n", loop->loop_no);
|
fprintf (dump_file, ";; loop %d bad when found\n", loop->loop_no);
|
goto bad_loop;
|
goto bad_loop;
|
}
|
}
|
|
|
/* Every loop contains in its list of inner loops every loop nested inside
|
/* Every loop contains in its list of inner loops every loop nested inside
|
it, even if there are intermediate loops. This works because we're doing
|
it, even if there are intermediate loops. This works because we're doing
|
a depth-first search here and never visit a loop more than once. */
|
a depth-first search here and never visit a loop more than once. */
|
for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, inner); ix++)
|
for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, inner); ix++)
|
{
|
{
|
bfin_optimize_loop (inner);
|
bfin_optimize_loop (inner);
|
|
|
if (!inner->bad && inner_depth < inner->depth)
|
if (!inner->bad && inner_depth < inner->depth)
|
{
|
{
|
inner_depth = inner->depth;
|
inner_depth = inner->depth;
|
|
|
loop->clobber_loop0 |= inner->clobber_loop0;
|
loop->clobber_loop0 |= inner->clobber_loop0;
|
loop->clobber_loop1 |= inner->clobber_loop1;
|
loop->clobber_loop1 |= inner->clobber_loop1;
|
}
|
}
|
}
|
}
|
|
|
loop->depth = inner_depth + 1;
|
loop->depth = inner_depth + 1;
|
if (loop->depth > MAX_LOOP_DEPTH)
|
if (loop->depth > MAX_LOOP_DEPTH)
|
{
|
{
|
if (dump_file)
|
if (dump_file)
|
fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
|
fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
|
goto bad_loop;
|
goto bad_loop;
|
}
|
}
|
|
|
/* Get the loop iteration register. */
|
/* Get the loop iteration register. */
|
iter_reg = loop->iter_reg;
|
iter_reg = loop->iter_reg;
|
|
|
if (!DPREG_P (iter_reg))
|
if (!DPREG_P (iter_reg))
|
{
|
{
|
if (dump_file)
|
if (dump_file)
|
fprintf (dump_file, ";; loop %d iteration count NOT in PREG or DREG\n",
|
fprintf (dump_file, ";; loop %d iteration count NOT in PREG or DREG\n",
|
loop->loop_no);
|
loop->loop_no);
|
goto bad_loop;
|
goto bad_loop;
|
}
|
}
|
|
|
/* Check if start_label appears before loop_end and calculate the
|
/* Check if start_label appears before loop_end and calculate the
|
offset between them. We calculate the length of instructions
|
offset between them. We calculate the length of instructions
|
conservatively. */
|
conservatively. */
|
length = 0;
|
length = 0;
|
for (insn = loop->start_label;
|
for (insn = loop->start_label;
|
insn && insn != loop->loop_end;
|
insn && insn != loop->loop_end;
|
insn = NEXT_INSN (insn))
|
insn = NEXT_INSN (insn))
|
{
|
{
|
if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
|
if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
|
{
|
{
|
if (TARGET_CSYNC_ANOMALY)
|
if (TARGET_CSYNC_ANOMALY)
|
length += 8;
|
length += 8;
|
else if (TARGET_SPECLD_ANOMALY)
|
else if (TARGET_SPECLD_ANOMALY)
|
length += 6;
|
length += 6;
|
}
|
}
|
else if (LABEL_P (insn))
|
else if (LABEL_P (insn))
|
{
|
{
|
if (TARGET_CSYNC_ANOMALY)
|
if (TARGET_CSYNC_ANOMALY)
|
length += 4;
|
length += 4;
|
}
|
}
|
|
|
if (INSN_P (insn))
|
if (INSN_P (insn))
|
length += get_attr_length (insn);
|
length += get_attr_length (insn);
|
}
|
}
|
|
|
if (!insn)
|
if (!insn)
|
{
|
{
|
if (dump_file)
|
if (dump_file)
|
fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
|
fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
|
loop->loop_no);
|
loop->loop_no);
|
goto bad_loop;
|
goto bad_loop;
|
}
|
}
|
|
|
loop->length = length;
|
loop->length = length;
|
if (loop->length > MAX_LOOP_LENGTH)
|
if (loop->length > MAX_LOOP_LENGTH)
|
{
|
{
|
if (dump_file)
|
if (dump_file)
|
fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
|
fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
|
goto bad_loop;
|
goto bad_loop;
|
}
|
}
|
|
|
/* Scan all the blocks to make sure they don't use iter_reg. */
|
/* Scan all the blocks to make sure they don't use iter_reg. */
|
if (bfin_scan_loop (loop, iter_reg, loop->loop_end))
|
if (bfin_scan_loop (loop, iter_reg, loop->loop_end))
|
{
|
{
|
if (dump_file)
|
if (dump_file)
|
fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
|
fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
|
goto bad_loop;
|
goto bad_loop;
|
}
|
}
|
|
|
/* Scan all the insns to see if the loop body clobber
|
/* Scan all the insns to see if the loop body clobber
|
any hardware loop registers. */
|
any hardware loop registers. */
|
|
|
reg_lc0 = gen_rtx_REG (SImode, REG_LC0);
|
reg_lc0 = gen_rtx_REG (SImode, REG_LC0);
|
reg_lc1 = gen_rtx_REG (SImode, REG_LC1);
|
reg_lc1 = gen_rtx_REG (SImode, REG_LC1);
|
reg_lt0 = gen_rtx_REG (SImode, REG_LT0);
|
reg_lt0 = gen_rtx_REG (SImode, REG_LT0);
|
reg_lt1 = gen_rtx_REG (SImode, REG_LT1);
|
reg_lt1 = gen_rtx_REG (SImode, REG_LT1);
|
reg_lb0 = gen_rtx_REG (SImode, REG_LB0);
|
reg_lb0 = gen_rtx_REG (SImode, REG_LB0);
|
reg_lb1 = gen_rtx_REG (SImode, REG_LB1);
|
reg_lb1 = gen_rtx_REG (SImode, REG_LB1);
|
|
|
for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
|
for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
|
{
|
{
|
rtx insn;
|
rtx insn;
|
|
|
for (insn = BB_HEAD (bb);
|
for (insn = BB_HEAD (bb);
|
insn != NEXT_INSN (BB_END (bb));
|
insn != NEXT_INSN (BB_END (bb));
|
insn = NEXT_INSN (insn))
|
insn = NEXT_INSN (insn))
|
{
|
{
|
if (!INSN_P (insn))
|
if (!INSN_P (insn))
|
continue;
|
continue;
|
|
|
if (reg_set_p (reg_lc0, insn)
|
if (reg_set_p (reg_lc0, insn)
|
|| reg_set_p (reg_lt0, insn)
|
|| reg_set_p (reg_lt0, insn)
|
|| reg_set_p (reg_lb0, insn))
|
|| reg_set_p (reg_lb0, insn))
|
loop->clobber_loop0 = 1;
|
loop->clobber_loop0 = 1;
|
|
|
if (reg_set_p (reg_lc1, insn)
|
if (reg_set_p (reg_lc1, insn)
|
|| reg_set_p (reg_lt1, insn)
|
|| reg_set_p (reg_lt1, insn)
|
|| reg_set_p (reg_lb1, insn))
|
|| reg_set_p (reg_lb1, insn))
|
loop->clobber_loop1 |= 1;
|
loop->clobber_loop1 |= 1;
|
}
|
}
|
}
|
}
|
|
|
if ((loop->clobber_loop0 && loop->clobber_loop1)
|
if ((loop->clobber_loop0 && loop->clobber_loop1)
|
|| (loop->depth == MAX_LOOP_DEPTH && loop->clobber_loop0))
|
|| (loop->depth == MAX_LOOP_DEPTH && loop->clobber_loop0))
|
{
|
{
|
loop->depth = MAX_LOOP_DEPTH + 1;
|
loop->depth = MAX_LOOP_DEPTH + 1;
|
if (dump_file)
|
if (dump_file)
|
fprintf (dump_file, ";; loop %d no loop reg available\n",
|
fprintf (dump_file, ";; loop %d no loop reg available\n",
|
loop->loop_no);
|
loop->loop_no);
|
goto bad_loop;
|
goto bad_loop;
|
}
|
}
|
|
|
/* There should be an instruction before the loop_end instruction
|
/* There should be an instruction before the loop_end instruction
|
in the same basic block. And the instruction must not be
|
in the same basic block. And the instruction must not be
|
- JUMP
|
- JUMP
|
- CONDITIONAL BRANCH
|
- CONDITIONAL BRANCH
|
- CALL
|
- CALL
|
- CSYNC
|
- CSYNC
|
- SSYNC
|
- SSYNC
|
- Returns (RTS, RTN, etc.) */
|
- Returns (RTS, RTN, etc.) */
|
|
|
bb = loop->tail;
|
bb = loop->tail;
|
last_insn = PREV_INSN (loop->loop_end);
|
last_insn = PREV_INSN (loop->loop_end);
|
|
|
while (1)
|
while (1)
|
{
|
{
|
for (; last_insn != PREV_INSN (BB_HEAD (bb));
|
for (; last_insn != PREV_INSN (BB_HEAD (bb));
|
last_insn = PREV_INSN (last_insn))
|
last_insn = PREV_INSN (last_insn))
|
if (INSN_P (last_insn))
|
if (INSN_P (last_insn))
|
break;
|
break;
|
|
|
if (last_insn != PREV_INSN (BB_HEAD (bb)))
|
if (last_insn != PREV_INSN (BB_HEAD (bb)))
|
break;
|
break;
|
|
|
if (single_pred_p (bb)
|
if (single_pred_p (bb)
|
&& single_pred (bb) != ENTRY_BLOCK_PTR)
|
&& single_pred (bb) != ENTRY_BLOCK_PTR)
|
{
|
{
|
bb = single_pred (bb);
|
bb = single_pred (bb);
|
last_insn = BB_END (bb);
|
last_insn = BB_END (bb);
|
continue;
|
continue;
|
}
|
}
|
else
|
else
|
{
|
{
|
last_insn = NULL_RTX;
|
last_insn = NULL_RTX;
|
break;
|
break;
|
}
|
}
|
}
|
}
|
|
|
if (!last_insn)
|
if (!last_insn)
|
{
|
{
|
if (dump_file)
|
if (dump_file)
|
fprintf (dump_file, ";; loop %d has no last instruction\n",
|
fprintf (dump_file, ";; loop %d has no last instruction\n",
|
loop->loop_no);
|
loop->loop_no);
|
goto bad_loop;
|
goto bad_loop;
|
}
|
}
|
|
|
if (JUMP_P (last_insn))
|
if (JUMP_P (last_insn))
|
{
|
{
|
loop_info inner = bb->aux;
|
loop_info inner = bb->aux;
|
if (inner
|
if (inner
|
&& inner->outer == loop
|
&& inner->outer == loop
|
&& inner->loop_end == last_insn
|
&& inner->loop_end == last_insn
|
&& inner->depth == 1)
|
&& inner->depth == 1)
|
/* This jump_insn is the exact loop_end of an inner loop
|
/* This jump_insn is the exact loop_end of an inner loop
|
and to be optimized away. So use the inner's last_insn. */
|
and to be optimized away. So use the inner's last_insn. */
|
last_insn = inner->last_insn;
|
last_insn = inner->last_insn;
|
else
|
else
|
{
|
{
|
if (dump_file)
|
if (dump_file)
|
fprintf (dump_file, ";; loop %d has bad last instruction\n",
|
fprintf (dump_file, ";; loop %d has bad last instruction\n",
|
loop->loop_no);
|
loop->loop_no);
|
goto bad_loop;
|
goto bad_loop;
|
}
|
}
|
}
|
}
|
else if (CALL_P (last_insn)
|
else if (CALL_P (last_insn)
|
|| get_attr_type (last_insn) == TYPE_SYNC
|
|| get_attr_type (last_insn) == TYPE_SYNC
|
|| recog_memoized (last_insn) == CODE_FOR_return_internal)
|
|| recog_memoized (last_insn) == CODE_FOR_return_internal)
|
{
|
{
|
if (dump_file)
|
if (dump_file)
|
fprintf (dump_file, ";; loop %d has bad last instruction\n",
|
fprintf (dump_file, ";; loop %d has bad last instruction\n",
|
loop->loop_no);
|
loop->loop_no);
|
goto bad_loop;
|
goto bad_loop;
|
}
|
}
|
|
|
if (GET_CODE (PATTERN (last_insn)) == ASM_INPUT
|
if (GET_CODE (PATTERN (last_insn)) == ASM_INPUT
|
|| asm_noperands (PATTERN (last_insn)) >= 0
|
|| asm_noperands (PATTERN (last_insn)) >= 0
|
|| get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI)
|
|| get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI)
|
{
|
{
|
nop_insn = emit_insn_after (gen_nop (), last_insn);
|
nop_insn = emit_insn_after (gen_nop (), last_insn);
|
last_insn = nop_insn;
|
last_insn = nop_insn;
|
}
|
}
|
|
|
loop->last_insn = last_insn;
|
loop->last_insn = last_insn;
|
|
|
/* The loop is good for replacement. */
|
/* The loop is good for replacement. */
|
start_label = loop->start_label;
|
start_label = loop->start_label;
|
end_label = gen_label_rtx ();
|
end_label = gen_label_rtx ();
|
iter_reg = loop->iter_reg;
|
iter_reg = loop->iter_reg;
|
|
|
if (loop->depth == 1 && !loop->clobber_loop1)
|
if (loop->depth == 1 && !loop->clobber_loop1)
|
{
|
{
|
lc_reg = reg_lc1;
|
lc_reg = reg_lc1;
|
lt_reg = reg_lt1;
|
lt_reg = reg_lt1;
|
lb_reg = reg_lb1;
|
lb_reg = reg_lb1;
|
loop->clobber_loop1 = 1;
|
loop->clobber_loop1 = 1;
|
}
|
}
|
else
|
else
|
{
|
{
|
lc_reg = reg_lc0;
|
lc_reg = reg_lc0;
|
lt_reg = reg_lt0;
|
lt_reg = reg_lt0;
|
lb_reg = reg_lb0;
|
lb_reg = reg_lb0;
|
loop->clobber_loop0 = 1;
|
loop->clobber_loop0 = 1;
|
}
|
}
|
|
|
/* If iter_reg is a DREG, we need generate an instruction to load
|
/* If iter_reg is a DREG, we need generate an instruction to load
|
the loop count into LC register. */
|
the loop count into LC register. */
|
if (D_REGNO_P (REGNO (iter_reg)))
|
if (D_REGNO_P (REGNO (iter_reg)))
|
{
|
{
|
init_insn = gen_movsi (lc_reg, iter_reg);
|
init_insn = gen_movsi (lc_reg, iter_reg);
|
loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
|
loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
|
lb_reg, end_label,
|
lb_reg, end_label,
|
lc_reg);
|
lc_reg);
|
}
|
}
|
else if (P_REGNO_P (REGNO (iter_reg)))
|
else if (P_REGNO_P (REGNO (iter_reg)))
|
{
|
{
|
init_insn = NULL_RTX;
|
init_insn = NULL_RTX;
|
loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
|
loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
|
lb_reg, end_label,
|
lb_reg, end_label,
|
lc_reg, iter_reg);
|
lc_reg, iter_reg);
|
}
|
}
|
else
|
else
|
gcc_unreachable ();
|
gcc_unreachable ();
|
|
|
loop->init = init_insn;
|
loop->init = init_insn;
|
loop->end_label = end_label;
|
loop->end_label = end_label;
|
loop->loop_init = loop_init;
|
loop->loop_init = loop_init;
|
|
|
if (dump_file)
|
if (dump_file)
|
{
|
{
|
fprintf (dump_file, ";; replacing loop %d initializer with\n",
|
fprintf (dump_file, ";; replacing loop %d initializer with\n",
|
loop->loop_no);
|
loop->loop_no);
|
print_rtl_single (dump_file, loop->loop_init);
|
print_rtl_single (dump_file, loop->loop_init);
|
fprintf (dump_file, ";; replacing loop %d terminator with\n",
|
fprintf (dump_file, ";; replacing loop %d terminator with\n",
|
loop->loop_no);
|
loop->loop_no);
|
print_rtl_single (dump_file, loop->loop_end);
|
print_rtl_single (dump_file, loop->loop_end);
|
}
|
}
|
|
|
start_sequence ();
|
start_sequence ();
|
|
|
if (loop->init != NULL_RTX)
|
if (loop->init != NULL_RTX)
|
emit_insn (loop->init);
|
emit_insn (loop->init);
|
emit_insn(loop->loop_init);
|
emit_insn(loop->loop_init);
|
emit_label (loop->start_label);
|
emit_label (loop->start_label);
|
|
|
seq = get_insns ();
|
seq = get_insns ();
|
end_sequence ();
|
end_sequence ();
|
|
|
emit_insn_after (seq, BB_END (loop->predecessor));
|
emit_insn_after (seq, BB_END (loop->predecessor));
|
delete_insn (loop->loop_end);
|
delete_insn (loop->loop_end);
|
|
|
/* Insert the loop end label before the last instruction of the loop. */
|
/* Insert the loop end label before the last instruction of the loop. */
|
emit_label_before (loop->end_label, loop->last_insn);
|
emit_label_before (loop->end_label, loop->last_insn);
|
|
|
return;
|
return;
|
|
|
bad_loop:
|
bad_loop:
|
|
|
if (dump_file)
|
if (dump_file)
|
fprintf (dump_file, ";; loop %d is bad\n", loop->loop_no);
|
fprintf (dump_file, ";; loop %d is bad\n", loop->loop_no);
|
|
|
loop->bad = 1;
|
loop->bad = 1;
|
|
|
if (DPREG_P (loop->iter_reg))
|
if (DPREG_P (loop->iter_reg))
|
{
|
{
|
/* If loop->iter_reg is a DREG or PREG, we can split it here
|
/* If loop->iter_reg is a DREG or PREG, we can split it here
|
without scratch register. */
|
without scratch register. */
|
rtx insn;
|
rtx insn;
|
|
|
emit_insn_before (gen_addsi3 (loop->iter_reg,
|
emit_insn_before (gen_addsi3 (loop->iter_reg,
|
loop->iter_reg,
|
loop->iter_reg,
|
constm1_rtx),
|
constm1_rtx),
|
loop->loop_end);
|
loop->loop_end);
|
|
|
emit_insn_before (gen_cmpsi (loop->iter_reg, const0_rtx),
|
emit_insn_before (gen_cmpsi (loop->iter_reg, const0_rtx),
|
loop->loop_end);
|
loop->loop_end);
|
|
|
insn = emit_jump_insn_before (gen_bne (loop->start_label),
|
insn = emit_jump_insn_before (gen_bne (loop->start_label),
|
loop->loop_end);
|
loop->loop_end);
|
|
|
JUMP_LABEL (insn) = loop->start_label;
|
JUMP_LABEL (insn) = loop->start_label;
|
LABEL_NUSES (loop->start_label)++;
|
LABEL_NUSES (loop->start_label)++;
|
delete_insn (loop->loop_end);
|
delete_insn (loop->loop_end);
|
}
|
}
|
}
|
}
|
|
|
/* Called from bfin_reorg_loops when a potential loop end is found. LOOP is
|
/* Called from bfin_reorg_loops when a potential loop end is found. LOOP is
|
a newly set up structure describing the loop, it is this function's
|
a newly set up structure describing the loop, it is this function's
|
responsibility to fill most of it. TAIL_BB and TAIL_INSN point to the
|
responsibility to fill most of it. TAIL_BB and TAIL_INSN point to the
|
loop_end insn and its enclosing basic block. */
|
loop_end insn and its enclosing basic block. */
|
|
|
static void
|
static void
|
bfin_discover_loop (loop_info loop, basic_block tail_bb, rtx tail_insn)
|
bfin_discover_loop (loop_info loop, basic_block tail_bb, rtx tail_insn)
|
{
|
{
|
unsigned dwork = 0;
|
unsigned dwork = 0;
|
basic_block bb;
|
basic_block bb;
|
VEC (basic_block,heap) *works = VEC_alloc (basic_block,heap,20);
|
VEC (basic_block,heap) *works = VEC_alloc (basic_block,heap,20);
|
|
|
loop->tail = tail_bb;
|
loop->tail = tail_bb;
|
loop->head = BRANCH_EDGE (tail_bb)->dest;
|
loop->head = BRANCH_EDGE (tail_bb)->dest;
|
loop->successor = FALLTHRU_EDGE (tail_bb)->dest;
|
loop->successor = FALLTHRU_EDGE (tail_bb)->dest;
|
loop->predecessor = NULL;
|
loop->predecessor = NULL;
|
loop->loop_end = tail_insn;
|
loop->loop_end = tail_insn;
|
loop->last_insn = NULL_RTX;
|
loop->last_insn = NULL_RTX;
|
loop->iter_reg = SET_DEST (XVECEXP (PATTERN (tail_insn), 0, 1));
|
loop->iter_reg = SET_DEST (XVECEXP (PATTERN (tail_insn), 0, 1));
|
loop->depth = loop->length = 0;
|
loop->depth = loop->length = 0;
|
loop->visited = 0;
|
loop->visited = 0;
|
loop->clobber_loop0 = loop->clobber_loop1 = 0;
|
loop->clobber_loop0 = loop->clobber_loop1 = 0;
|
loop->outer = NULL;
|
loop->outer = NULL;
|
loop->loops = NULL;
|
loop->loops = NULL;
|
|
|
loop->init = loop->loop_init = NULL_RTX;
|
loop->init = loop->loop_init = NULL_RTX;
|
loop->start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (tail_insn), 0, 0)), 1), 0);
|
loop->start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (tail_insn), 0, 0)), 1), 0);
|
loop->end_label = NULL_RTX;
|
loop->end_label = NULL_RTX;
|
loop->bad = 0;
|
loop->bad = 0;
|
|
|
VEC_safe_push (basic_block, heap, works, loop->head);
|
VEC_safe_push (basic_block, heap, works, loop->head);
|
|
|
while (VEC_iterate (basic_block, works, dwork++, bb))
|
while (VEC_iterate (basic_block, works, dwork++, bb))
|
{
|
{
|
edge e;
|
edge e;
|
edge_iterator ei;
|
edge_iterator ei;
|
if (bb == EXIT_BLOCK_PTR)
|
if (bb == EXIT_BLOCK_PTR)
|
{
|
{
|
/* We've reached the exit block. The loop must be bad. */
|
/* We've reached the exit block. The loop must be bad. */
|
if (dump_file)
|
if (dump_file)
|
fprintf (dump_file,
|
fprintf (dump_file,
|
";; Loop is bad - reached exit block while scanning\n");
|
";; Loop is bad - reached exit block while scanning\n");
|
loop->bad = 1;
|
loop->bad = 1;
|
break;
|
break;
|
}
|
}
|
|
|
if (bitmap_bit_p (loop->block_bitmap, bb->index))
|
if (bitmap_bit_p (loop->block_bitmap, bb->index))
|
continue;
|
continue;
|
|
|
/* We've not seen this block before. Add it to the loop's
|
/* We've not seen this block before. Add it to the loop's
|
list and then add each successor to the work list. */
|
list and then add each successor to the work list. */
|
|
|
VEC_safe_push (basic_block, heap, loop->blocks, bb);
|
VEC_safe_push (basic_block, heap, loop->blocks, bb);
|
bitmap_set_bit (loop->block_bitmap, bb->index);
|
bitmap_set_bit (loop->block_bitmap, bb->index);
|
|
|
if (bb != tail_bb)
|
if (bb != tail_bb)
|
{
|
{
|
FOR_EACH_EDGE (e, ei, bb->succs)
|
FOR_EACH_EDGE (e, ei, bb->succs)
|
{
|
{
|
basic_block succ = EDGE_SUCC (bb, ei.index)->dest;
|
basic_block succ = EDGE_SUCC (bb, ei.index)->dest;
|
if (!REGNO_REG_SET_P (succ->il.rtl->global_live_at_start,
|
if (!REGNO_REG_SET_P (succ->il.rtl->global_live_at_start,
|
REGNO (loop->iter_reg)))
|
REGNO (loop->iter_reg)))
|
continue;
|
continue;
|
if (!VEC_space (basic_block, works, 1))
|
if (!VEC_space (basic_block, works, 1))
|
{
|
{
|
if (dwork)
|
if (dwork)
|
{
|
{
|
VEC_block_remove (basic_block, works, 0, dwork);
|
VEC_block_remove (basic_block, works, 0, dwork);
|
dwork = 0;
|
dwork = 0;
|
}
|
}
|
else
|
else
|
VEC_reserve (basic_block, heap, works, 1);
|
VEC_reserve (basic_block, heap, works, 1);
|
}
|
}
|
VEC_quick_push (basic_block, works, succ);
|
VEC_quick_push (basic_block, works, succ);
|
}
|
}
|
}
|
}
|
}
|
}
|
|
|
if (!loop->bad)
|
if (!loop->bad)
|
{
|
{
|
/* Make sure we only have one entry point. */
|
/* Make sure we only have one entry point. */
|
if (EDGE_COUNT (loop->head->preds) == 2)
|
if (EDGE_COUNT (loop->head->preds) == 2)
|
{
|
{
|
loop->predecessor = EDGE_PRED (loop->head, 0)->src;
|
loop->predecessor = EDGE_PRED (loop->head, 0)->src;
|
if (loop->predecessor == loop->tail)
|
if (loop->predecessor == loop->tail)
|
/* We wanted the other predecessor. */
|
/* We wanted the other predecessor. */
|
loop->predecessor = EDGE_PRED (loop->head, 1)->src;
|
loop->predecessor = EDGE_PRED (loop->head, 1)->src;
|
|
|
/* We can only place a loop insn on a fall through edge of a
|
/* We can only place a loop insn on a fall through edge of a
|
single exit block. */
|
single exit block. */
|
if (EDGE_COUNT (loop->predecessor->succs) != 1
|
if (EDGE_COUNT (loop->predecessor->succs) != 1
|
|| !(EDGE_SUCC (loop->predecessor, 0)->flags & EDGE_FALLTHRU)
|
|| !(EDGE_SUCC (loop->predecessor, 0)->flags & EDGE_FALLTHRU)
|
/* If loop->predecessor is in loop, loop->head is not really
|
/* If loop->predecessor is in loop, loop->head is not really
|
the head of the loop. */
|
the head of the loop. */
|
|| bfin_bb_in_loop (loop, loop->predecessor))
|
|| bfin_bb_in_loop (loop, loop->predecessor))
|
loop->predecessor = NULL;
|
loop->predecessor = NULL;
|
}
|
}
|
|
|
if (loop->predecessor == NULL)
|
if (loop->predecessor == NULL)
|
{
|
{
|
if (dump_file)
|
if (dump_file)
|
fprintf (dump_file, ";; loop has bad predecessor\n");
|
fprintf (dump_file, ";; loop has bad predecessor\n");
|
loop->bad = 1;
|
loop->bad = 1;
|
}
|
}
|
}
|
}
|
|
|
#ifdef ENABLE_CHECKING
|
#ifdef ENABLE_CHECKING
|
/* Make sure nothing jumps into this loop. This shouldn't happen as we
|
/* Make sure nothing jumps into this loop. This shouldn't happen as we
|
wouldn't have generated the counted loop patterns in such a case.
|
wouldn't have generated the counted loop patterns in such a case.
|
However, this test must be done after the test above to detect loops
|
However, this test must be done after the test above to detect loops
|
with invalid headers. */
|
with invalid headers. */
|
if (!loop->bad)
|
if (!loop->bad)
|
for (dwork = 0; VEC_iterate (basic_block, loop->blocks, dwork, bb); dwork++)
|
for (dwork = 0; VEC_iterate (basic_block, loop->blocks, dwork, bb); dwork++)
|
{
|
{
|
edge e;
|
edge e;
|
edge_iterator ei;
|
edge_iterator ei;
|
if (bb == loop->head)
|
if (bb == loop->head)
|
continue;
|
continue;
|
FOR_EACH_EDGE (e, ei, bb->preds)
|
FOR_EACH_EDGE (e, ei, bb->preds)
|
{
|
{
|
basic_block pred = EDGE_PRED (bb, ei.index)->src;
|
basic_block pred = EDGE_PRED (bb, ei.index)->src;
|
if (!bfin_bb_in_loop (loop, pred))
|
if (!bfin_bb_in_loop (loop, pred))
|
abort ();
|
abort ();
|
}
|
}
|
}
|
}
|
#endif
|
#endif
|
VEC_free (basic_block, heap, works);
|
VEC_free (basic_block, heap, works);
|
}
|
}
|
|
|
static void
|
static void
|
bfin_reorg_loops (FILE *dump_file)
|
bfin_reorg_loops (FILE *dump_file)
|
{
|
{
|
bitmap_obstack stack;
|
bitmap_obstack stack;
|
bitmap tmp_bitmap;
|
bitmap tmp_bitmap;
|
basic_block bb;
|
basic_block bb;
|
loop_info loops = NULL;
|
loop_info loops = NULL;
|
loop_info loop;
|
loop_info loop;
|
int nloops = 0;
|
int nloops = 0;
|
|
|
bitmap_obstack_initialize (&stack);
|
bitmap_obstack_initialize (&stack);
|
|
|
/* Find all the possible loop tails. This means searching for every
|
/* Find all the possible loop tails. This means searching for every
|
loop_end instruction. For each one found, create a loop_info
|
loop_end instruction. For each one found, create a loop_info
|
structure and add the head block to the work list. */
|
structure and add the head block to the work list. */
|
FOR_EACH_BB (bb)
|
FOR_EACH_BB (bb)
|
{
|
{
|
rtx tail = BB_END (bb);
|
rtx tail = BB_END (bb);
|
|
|
while (GET_CODE (tail) == NOTE)
|
while (GET_CODE (tail) == NOTE)
|
tail = PREV_INSN (tail);
|
tail = PREV_INSN (tail);
|
|
|
bb->aux = NULL;
|
bb->aux = NULL;
|
|
|
if (INSN_P (tail) && recog_memoized (tail) == CODE_FOR_loop_end)
|
if (INSN_P (tail) && recog_memoized (tail) == CODE_FOR_loop_end)
|
{
|
{
|
/* A possible loop end */
|
/* A possible loop end */
|
|
|
loop = XNEW (struct loop_info);
|
loop = XNEW (struct loop_info);
|
loop->next = loops;
|
loop->next = loops;
|
loops = loop;
|
loops = loop;
|
loop->loop_no = nloops++;
|
loop->loop_no = nloops++;
|
loop->blocks = VEC_alloc (basic_block, heap, 20);
|
loop->blocks = VEC_alloc (basic_block, heap, 20);
|
loop->block_bitmap = BITMAP_ALLOC (&stack);
|
loop->block_bitmap = BITMAP_ALLOC (&stack);
|
bb->aux = loop;
|
bb->aux = loop;
|
|
|
if (dump_file)
|
if (dump_file)
|
{
|
{
|
fprintf (dump_file, ";; potential loop %d ending at\n",
|
fprintf (dump_file, ";; potential loop %d ending at\n",
|
loop->loop_no);
|
loop->loop_no);
|
print_rtl_single (dump_file, tail);
|
print_rtl_single (dump_file, tail);
|
}
|
}
|
|
|
bfin_discover_loop (loop, bb, tail);
|
bfin_discover_loop (loop, bb, tail);
|
}
|
}
|
}
|
}
|
|
|
tmp_bitmap = BITMAP_ALLOC (&stack);
|
tmp_bitmap = BITMAP_ALLOC (&stack);
|
/* Compute loop nestings. */
|
/* Compute loop nestings. */
|
for (loop = loops; loop; loop = loop->next)
|
for (loop = loops; loop; loop = loop->next)
|
{
|
{
|
loop_info other;
|
loop_info other;
|
if (loop->bad)
|
if (loop->bad)
|
continue;
|
continue;
|
|
|
for (other = loop->next; other; other = other->next)
|
for (other = loop->next; other; other = other->next)
|
{
|
{
|
if (other->bad)
|
if (other->bad)
|
continue;
|
continue;
|
|
|
bitmap_and (tmp_bitmap, other->block_bitmap, loop->block_bitmap);
|
bitmap_and (tmp_bitmap, other->block_bitmap, loop->block_bitmap);
|
if (bitmap_empty_p (tmp_bitmap))
|
if (bitmap_empty_p (tmp_bitmap))
|
continue;
|
continue;
|
if (bitmap_equal_p (tmp_bitmap, other->block_bitmap))
|
if (bitmap_equal_p (tmp_bitmap, other->block_bitmap))
|
{
|
{
|
other->outer = loop;
|
other->outer = loop;
|
VEC_safe_push (loop_info, heap, loop->loops, other);
|
VEC_safe_push (loop_info, heap, loop->loops, other);
|
}
|
}
|
else if (bitmap_equal_p (tmp_bitmap, loop->block_bitmap))
|
else if (bitmap_equal_p (tmp_bitmap, loop->block_bitmap))
|
{
|
{
|
loop->outer = other;
|
loop->outer = other;
|
VEC_safe_push (loop_info, heap, other->loops, loop);
|
VEC_safe_push (loop_info, heap, other->loops, loop);
|
}
|
}
|
else
|
else
|
{
|
{
|
loop->bad = other->bad = 1;
|
loop->bad = other->bad = 1;
|
}
|
}
|
}
|
}
|
}
|
}
|
BITMAP_FREE (tmp_bitmap);
|
BITMAP_FREE (tmp_bitmap);
|
|
|
if (dump_file)
|
if (dump_file)
|
{
|
{
|
fprintf (dump_file, ";; All loops found:\n\n");
|
fprintf (dump_file, ";; All loops found:\n\n");
|
bfin_dump_loops (loops);
|
bfin_dump_loops (loops);
|
}
|
}
|
|
|
/* Now apply the optimizations. */
|
/* Now apply the optimizations. */
|
for (loop = loops; loop; loop = loop->next)
|
for (loop = loops; loop; loop = loop->next)
|
bfin_optimize_loop (loop);
|
bfin_optimize_loop (loop);
|
|
|
if (dump_file)
|
if (dump_file)
|
{
|
{
|
fprintf (dump_file, ";; After hardware loops optimization:\n\n");
|
fprintf (dump_file, ";; After hardware loops optimization:\n\n");
|
bfin_dump_loops (loops);
|
bfin_dump_loops (loops);
|
}
|
}
|
|
|
/* Free up the loop structures */
|
/* Free up the loop structures */
|
while (loops)
|
while (loops)
|
{
|
{
|
loop = loops;
|
loop = loops;
|
loops = loop->next;
|
loops = loop->next;
|
VEC_free (loop_info, heap, loop->loops);
|
VEC_free (loop_info, heap, loop->loops);
|
VEC_free (basic_block, heap, loop->blocks);
|
VEC_free (basic_block, heap, loop->blocks);
|
BITMAP_FREE (loop->block_bitmap);
|
BITMAP_FREE (loop->block_bitmap);
|
XDELETE (loop);
|
XDELETE (loop);
|
}
|
}
|
|
|
if (dump_file)
|
if (dump_file)
|
print_rtl (dump_file, get_insns ());
|
print_rtl (dump_file, get_insns ());
|
}
|
}
|
|
|
|
|
/* We use the machine specific reorg pass for emitting CSYNC instructions
   after conditional branches as needed.

   The Blackfin is unusual in that a code sequence like
     if cc jump label
     r0 = (p0)
   may speculatively perform the load even if the condition isn't true.  This
   happens for a branch that is predicted not taken, because the pipeline
   isn't flushed or stalled, so the early stages of the following instructions,
   which perform the memory reference, are allowed to execute before the
   jump condition is evaluated.
   Therefore, we must insert additional instructions in all places where this
   could lead to incorrect behavior.  The manual recommends CSYNC, while
   VDSP seems to use NOPs (even though its corresponding compiler option is
   named CSYNC).

   When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
   When optimizing for size, we turn the branch into a predicted taken one.
   This may be slower due to mispredicts, but saves code size.  */
|
|
|
static void
|
static void
|
bfin_reorg (void)
|
bfin_reorg (void)
|
{
|
{
|
rtx insn, last_condjump = NULL_RTX;
|
rtx insn, last_condjump = NULL_RTX;
|
int cycles_since_jump = INT_MAX;
|
int cycles_since_jump = INT_MAX;
|
|
|
/* Doloop optimization */
|
/* Doloop optimization */
|
if (cfun->machine->has_hardware_loops)
|
if (cfun->machine->has_hardware_loops)
|
bfin_reorg_loops (dump_file);
|
bfin_reorg_loops (dump_file);
|
|
|
if (! TARGET_SPECLD_ANOMALY && ! TARGET_CSYNC_ANOMALY)
|
if (! TARGET_SPECLD_ANOMALY && ! TARGET_CSYNC_ANOMALY)
|
return;
|
return;
|
|
|
/* First pass: find predicted-false branches; if something after them
|
/* First pass: find predicted-false branches; if something after them
|
needs nops, insert them or change the branch to predict true. */
|
needs nops, insert them or change the branch to predict true. */
|
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
|
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
|
{
|
{
|
rtx pat;
|
rtx pat;
|
|
|
if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
|
if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
|
continue;
|
continue;
|
|
|
pat = PATTERN (insn);
|
pat = PATTERN (insn);
|
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
|
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
|
|| GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
|
|| GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
|
|| GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
|
|| GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
|
continue;
|
continue;
|
|
|
if (JUMP_P (insn))
|
if (JUMP_P (insn))
|
{
|
{
|
if (any_condjump_p (insn)
|
if (any_condjump_p (insn)
|
&& ! cbranch_predicted_taken_p (insn))
|
&& ! cbranch_predicted_taken_p (insn))
|
{
|
{
|
last_condjump = insn;
|
last_condjump = insn;
|
cycles_since_jump = 0;
|
cycles_since_jump = 0;
|
}
|
}
|
else
|
else
|
cycles_since_jump = INT_MAX;
|
cycles_since_jump = INT_MAX;
|
}
|
}
|
else if (INSN_P (insn))
|
else if (INSN_P (insn))
|
{
|
{
|
enum attr_type type = get_attr_type (insn);
|
enum attr_type type = get_attr_type (insn);
|
int delay_needed = 0;
|
int delay_needed = 0;
|
if (cycles_since_jump < INT_MAX)
|
if (cycles_since_jump < INT_MAX)
|
cycles_since_jump++;
|
cycles_since_jump++;
|
|
|
if (type == TYPE_MCLD && TARGET_SPECLD_ANOMALY)
|
if (type == TYPE_MCLD && TARGET_SPECLD_ANOMALY)
|
{
|
{
|
rtx pat = single_set (insn);
|
rtx pat = single_set (insn);
|
if (may_trap_p (SET_SRC (pat)))
|
if (may_trap_p (SET_SRC (pat)))
|
delay_needed = 3;
|
delay_needed = 3;
|
}
|
}
|
else if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
|
else if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
|
delay_needed = 4;
|
delay_needed = 4;
|
|
|
if (delay_needed > cycles_since_jump)
|
if (delay_needed > cycles_since_jump)
|
{
|
{
|
rtx pat;
|
rtx pat;
|
int num_clobbers;
|
int num_clobbers;
|
rtx *op = recog_data.operand;
|
rtx *op = recog_data.operand;
|
|
|
delay_needed -= cycles_since_jump;
|
delay_needed -= cycles_since_jump;
|
|
|
extract_insn (last_condjump);
|
extract_insn (last_condjump);
|
if (optimize_size)
|
if (optimize_size)
|
{
|
{
|
pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
|
pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
|
op[3]);
|
op[3]);
|
cycles_since_jump = INT_MAX;
|
cycles_since_jump = INT_MAX;
|
}
|
}
|
else
|
else
|
/* Do not adjust cycles_since_jump in this case, so that
|
/* Do not adjust cycles_since_jump in this case, so that
|
we'll increase the number of NOPs for a subsequent insn
|
we'll increase the number of NOPs for a subsequent insn
|
if necessary. */
|
if necessary. */
|
pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
|
pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
|
GEN_INT (delay_needed));
|
GEN_INT (delay_needed));
|
PATTERN (last_condjump) = pat;
|
PATTERN (last_condjump) = pat;
|
INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
|
INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
|
}
|
}
|
}
|
}
|
}
|
}
|
/* Second pass: for predicted-true branches, see if anything at the
|
/* Second pass: for predicted-true branches, see if anything at the
|
branch destination needs extra nops. */
|
branch destination needs extra nops. */
|
if (! TARGET_CSYNC_ANOMALY)
|
if (! TARGET_CSYNC_ANOMALY)
|
return;
|
return;
|
|
|
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
|
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
|
{
|
{
|
if (JUMP_P (insn)
|
if (JUMP_P (insn)
|
&& any_condjump_p (insn)
|
&& any_condjump_p (insn)
|
&& (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
|
&& (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
|
|| cbranch_predicted_taken_p (insn)))
|
|| cbranch_predicted_taken_p (insn)))
|
{
|
{
|
rtx target = JUMP_LABEL (insn);
|
rtx target = JUMP_LABEL (insn);
|
rtx label = target;
|
rtx label = target;
|
cycles_since_jump = 0;
|
cycles_since_jump = 0;
|
for (; target && cycles_since_jump < 3; target = NEXT_INSN (target))
|
for (; target && cycles_since_jump < 3; target = NEXT_INSN (target))
|
{
|
{
|
rtx pat;
|
rtx pat;
|
|
|
if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
|
if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
|
continue;
|
continue;
|
|
|
pat = PATTERN (target);
|
pat = PATTERN (target);
|
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
|
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
|
|| GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
|
|| GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
|
|| GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
|
|| GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
|
continue;
|
continue;
|
|
|
if (INSN_P (target))
|
if (INSN_P (target))
|
{
|
{
|
enum attr_type type = get_attr_type (target);
|
enum attr_type type = get_attr_type (target);
|
int delay_needed = 0;
|
int delay_needed = 0;
|
if (cycles_since_jump < INT_MAX)
|
if (cycles_since_jump < INT_MAX)
|
cycles_since_jump++;
|
cycles_since_jump++;
|
|
|
if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
|
if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
|
delay_needed = 2;
|
delay_needed = 2;
|
|
|
if (delay_needed > cycles_since_jump)
|
if (delay_needed > cycles_since_jump)
|
{
|
{
|
rtx prev = prev_real_insn (label);
|
rtx prev = prev_real_insn (label);
|
delay_needed -= cycles_since_jump;
|
delay_needed -= cycles_since_jump;
|
if (dump_file)
|
if (dump_file)
|
fprintf (dump_file, "Adding %d nops after %d\n",
|
fprintf (dump_file, "Adding %d nops after %d\n",
|
delay_needed, INSN_UID (label));
|
delay_needed, INSN_UID (label));
|
if (JUMP_P (prev)
|
if (JUMP_P (prev)
|
&& INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
|
&& INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
|
{
|
{
|
rtx x;
|
rtx x;
|
HOST_WIDE_INT v;
|
HOST_WIDE_INT v;
|
|
|
if (dump_file)
|
if (dump_file)
|
fprintf (dump_file,
|
fprintf (dump_file,
|
"Reducing nops on insn %d.\n",
|
"Reducing nops on insn %d.\n",
|
INSN_UID (prev));
|
INSN_UID (prev));
|
x = PATTERN (prev);
|
x = PATTERN (prev);
|
x = XVECEXP (x, 0, 1);
|
x = XVECEXP (x, 0, 1);
|
v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
|
v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
|
XVECEXP (x, 0, 0) = GEN_INT (v);
|
XVECEXP (x, 0, 0) = GEN_INT (v);
|
}
|
}
|
while (delay_needed-- > 0)
|
while (delay_needed-- > 0)
|
emit_insn_after (gen_nop (), label);
|
emit_insn_after (gen_nop (), label);
|
break;
|
break;
|
}
|
}
|
}
|
}
|
}
|
}
|
}
|
}
|
}
|
}
|
}
|
}
|
|
|
/* Handle interrupt_handler, exception_handler and nmi_handler function
|
/* Handle interrupt_handler, exception_handler and nmi_handler function
|
attributes; arguments as in struct attribute_spec.handler. */
|
attributes; arguments as in struct attribute_spec.handler. */
|
|
|
static tree
|
static tree
|
handle_int_attribute (tree *node, tree name,
|
handle_int_attribute (tree *node, tree name,
|
tree args ATTRIBUTE_UNUSED,
|
tree args ATTRIBUTE_UNUSED,
|
int flags ATTRIBUTE_UNUSED,
|
int flags ATTRIBUTE_UNUSED,
|
bool *no_add_attrs)
|
bool *no_add_attrs)
|
{
|
{
|
tree x = *node;
|
tree x = *node;
|
if (TREE_CODE (x) == FUNCTION_DECL)
|
if (TREE_CODE (x) == FUNCTION_DECL)
|
x = TREE_TYPE (x);
|
x = TREE_TYPE (x);
|
|
|
if (TREE_CODE (x) != FUNCTION_TYPE)
|
if (TREE_CODE (x) != FUNCTION_TYPE)
|
{
|
{
|
warning (OPT_Wattributes, "%qs attribute only applies to functions",
|
warning (OPT_Wattributes, "%qs attribute only applies to functions",
|
IDENTIFIER_POINTER (name));
|
IDENTIFIER_POINTER (name));
|
*no_add_attrs = true;
|
*no_add_attrs = true;
|
}
|
}
|
else if (funkind (x) != SUBROUTINE)
|
else if (funkind (x) != SUBROUTINE)
|
error ("multiple function type attributes specified");
|
error ("multiple function type attributes specified");
|
|
|
return NULL_TREE;
|
return NULL_TREE;
|
}
|
}
|
|
|
/* Return 0 if the attributes for two types are incompatible, 1 if they
|
/* Return 0 if the attributes for two types are incompatible, 1 if they
|
are compatible, and 2 if they are nearly compatible (which causes a
|
are compatible, and 2 if they are nearly compatible (which causes a
|
warning to be generated). */
|
warning to be generated). */
|
|
|
static int
|
static int
|
bfin_comp_type_attributes (tree type1, tree type2)
|
bfin_comp_type_attributes (tree type1, tree type2)
|
{
|
{
|
e_funkind kind1, kind2;
|
e_funkind kind1, kind2;
|
|
|
if (TREE_CODE (type1) != FUNCTION_TYPE)
|
if (TREE_CODE (type1) != FUNCTION_TYPE)
|
return 1;
|
return 1;
|
|
|
kind1 = funkind (type1);
|
kind1 = funkind (type1);
|
kind2 = funkind (type2);
|
kind2 = funkind (type2);
|
|
|
if (kind1 != kind2)
|
if (kind1 != kind2)
|
return 0;
|
return 0;
|
|
|
/* Check for mismatched modifiers */
|
/* Check for mismatched modifiers */
|
if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
|
if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
|
!= !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
|
!= !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
|
return 0;
|
return 0;
|
|
|
if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
|
if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
|
!= !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
|
!= !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
|
return 0;
|
return 0;
|
|
|
if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
|
if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
|
!= !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
|
!= !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
|
return 0;
|
return 0;
|
|
|
if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
|
if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
|
!= !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
|
!= !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
|
return 0;
|
return 0;
|
|
|
return 1;
|
return 1;
|
}
|
}
|
|
|
/* Handle a "longcall" or "shortcall" attribute; arguments as in
|
/* Handle a "longcall" or "shortcall" attribute; arguments as in
|
struct attribute_spec.handler. */
|
struct attribute_spec.handler. */
|
|
|
static tree
|
static tree
|
bfin_handle_longcall_attribute (tree *node, tree name,
|
bfin_handle_longcall_attribute (tree *node, tree name,
|
tree args ATTRIBUTE_UNUSED,
|
tree args ATTRIBUTE_UNUSED,
|
int flags ATTRIBUTE_UNUSED,
|
int flags ATTRIBUTE_UNUSED,
|
bool *no_add_attrs)
|
bool *no_add_attrs)
|
{
|
{
|
if (TREE_CODE (*node) != FUNCTION_TYPE
|
if (TREE_CODE (*node) != FUNCTION_TYPE
|
&& TREE_CODE (*node) != FIELD_DECL
|
&& TREE_CODE (*node) != FIELD_DECL
|
&& TREE_CODE (*node) != TYPE_DECL)
|
&& TREE_CODE (*node) != TYPE_DECL)
|
{
|
{
|
warning (OPT_Wattributes, "`%s' attribute only applies to functions",
|
warning (OPT_Wattributes, "`%s' attribute only applies to functions",
|
IDENTIFIER_POINTER (name));
|
IDENTIFIER_POINTER (name));
|
*no_add_attrs = true;
|
*no_add_attrs = true;
|
}
|
}
|
|
|
if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
|
if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
|
&& lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
|
&& lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
|
|| (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
|
|| (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
|
&& lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
|
&& lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
|
{
|
{
|
warning (OPT_Wattributes,
|
warning (OPT_Wattributes,
|
"can't apply both longcall and shortcall attributes to the same function");
|
"can't apply both longcall and shortcall attributes to the same function");
|
*no_add_attrs = true;
|
*no_add_attrs = true;
|
}
|
}
|
|
|
return NULL_TREE;
|
return NULL_TREE;
|
}
|
}
|
|
|
/* Table of valid machine attributes. */
|
/* Table of valid machine attributes. */
|
const struct attribute_spec bfin_attribute_table[] =
|
const struct attribute_spec bfin_attribute_table[] =
|
{
|
{
|
/* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
|
/* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
|
{ "interrupt_handler", 0, 0, false, true, true, handle_int_attribute },
|
{ "interrupt_handler", 0, 0, false, true, true, handle_int_attribute },
|
{ "exception_handler", 0, 0, false, true, true, handle_int_attribute },
|
{ "exception_handler", 0, 0, false, true, true, handle_int_attribute },
|
{ "nmi_handler", 0, 0, false, true, true, handle_int_attribute },
|
{ "nmi_handler", 0, 0, false, true, true, handle_int_attribute },
|
{ "nesting", 0, 0, false, true, true, NULL },
|
{ "nesting", 0, 0, false, true, true, NULL },
|
{ "kspisusp", 0, 0, false, true, true, NULL },
|
{ "kspisusp", 0, 0, false, true, true, NULL },
|
{ "saveall", 0, 0, false, true, true, NULL },
|
{ "saveall", 0, 0, false, true, true, NULL },
|
{ "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute },
|
{ "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute },
|
{ "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute },
|
{ "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute },
|
{ NULL, 0, 0, false, false, false, NULL }
|
{ NULL, 0, 0, false, false, false, NULL }
|
};
|
};
|
|
|
/* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
|
/* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
|
tell the assembler to generate pointers to function descriptors in
|
tell the assembler to generate pointers to function descriptors in
|
some cases. */
|
some cases. */
|
|
|
static bool
|
static bool
|
bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
|
bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
|
{
|
{
|
if (TARGET_FDPIC && size == UNITS_PER_WORD)
|
if (TARGET_FDPIC && size == UNITS_PER_WORD)
|
{
|
{
|
if (GET_CODE (value) == SYMBOL_REF
|
if (GET_CODE (value) == SYMBOL_REF
|
&& SYMBOL_REF_FUNCTION_P (value))
|
&& SYMBOL_REF_FUNCTION_P (value))
|
{
|
{
|
fputs ("\t.picptr\tfuncdesc(", asm_out_file);
|
fputs ("\t.picptr\tfuncdesc(", asm_out_file);
|
output_addr_const (asm_out_file, value);
|
output_addr_const (asm_out_file, value);
|
fputs (")\n", asm_out_file);
|
fputs (")\n", asm_out_file);
|
return true;
|
return true;
|
}
|
}
|
if (!aligned_p)
|
if (!aligned_p)
|
{
|
{
|
/* We've set the unaligned SI op to NULL, so we always have to
|
/* We've set the unaligned SI op to NULL, so we always have to
|
handle the unaligned case here. */
|
handle the unaligned case here. */
|
assemble_integer_with_op ("\t.4byte\t", value);
|
assemble_integer_with_op ("\t.4byte\t", value);
|
return true;
|
return true;
|
}
|
}
|
}
|
}
|
return default_assemble_integer (value, size, aligned_p);
|
return default_assemble_integer (value, size, aligned_p);
|
}
|
}
|
|
|
/* Output the assembler code for a thunk function. THUNK_DECL is the
|
/* Output the assembler code for a thunk function. THUNK_DECL is the
|
declaration for the thunk function itself, FUNCTION is the decl for
|
declaration for the thunk function itself, FUNCTION is the decl for
|
the target function. DELTA is an immediate constant offset to be
|
the target function. DELTA is an immediate constant offset to be
|
added to THIS. If VCALL_OFFSET is nonzero, the word at
|
added to THIS. If VCALL_OFFSET is nonzero, the word at
|
*(*this + vcall_offset) should be added to THIS. */
|
*(*this + vcall_offset) should be added to THIS. */
|
|
|
static void
|
static void
|
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
|
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
|
tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
|
tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
|
HOST_WIDE_INT vcall_offset, tree function)
|
HOST_WIDE_INT vcall_offset, tree function)
|
{
|
{
|
rtx xops[3];
|
rtx xops[3];
|
/* The this parameter is passed as the first argument. */
|
/* The this parameter is passed as the first argument. */
|
rtx this = gen_rtx_REG (Pmode, REG_R0);
|
rtx this = gen_rtx_REG (Pmode, REG_R0);
|
|
|
/* Adjust the this parameter by a fixed constant. */
|
/* Adjust the this parameter by a fixed constant. */
|
if (delta)
|
if (delta)
|
{
|
{
|
xops[1] = this;
|
xops[1] = this;
|
if (delta >= -64 && delta <= 63)
|
if (delta >= -64 && delta <= 63)
|
{
|
{
|
xops[0] = GEN_INT (delta);
|
xops[0] = GEN_INT (delta);
|
output_asm_insn ("%1 += %0;", xops);
|
output_asm_insn ("%1 += %0;", xops);
|
}
|
}
|
else if (delta >= -128 && delta < -64)
|
else if (delta >= -128 && delta < -64)
|
{
|
{
|
xops[0] = GEN_INT (delta + 64);
|
xops[0] = GEN_INT (delta + 64);
|
output_asm_insn ("%1 += -64; %1 += %0;", xops);
|
output_asm_insn ("%1 += -64; %1 += %0;", xops);
|
}
|
}
|
else if (delta > 63 && delta <= 126)
|
else if (delta > 63 && delta <= 126)
|
{
|
{
|
xops[0] = GEN_INT (delta - 63);
|
xops[0] = GEN_INT (delta - 63);
|
output_asm_insn ("%1 += 63; %1 += %0;", xops);
|
output_asm_insn ("%1 += 63; %1 += %0;", xops);
|
}
|
}
|
else
|
else
|
{
|
{
|
xops[0] = GEN_INT (delta);
|
xops[0] = GEN_INT (delta);
|
output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
|
output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
|
}
|
}
|
}
|
}
|
|
|
/* Adjust the this parameter by a value stored in the vtable. */
|
/* Adjust the this parameter by a value stored in the vtable. */
|
if (vcall_offset)
|
if (vcall_offset)
|
{
|
{
|
rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
|
rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
|
rtx tmp = gen_rtx_REG (Pmode, REG_R2);
|
rtx tmp = gen_rtx_REG (Pmode, REG_R2);
|
|
|
xops[1] = tmp;
|
xops[1] = tmp;
|
xops[2] = p2tmp;
|
xops[2] = p2tmp;
|
output_asm_insn ("%2 = r0; %2 = [%2];", xops);
|
output_asm_insn ("%2 = r0; %2 = [%2];", xops);
|
|
|
/* Adjust the this parameter. */
|
/* Adjust the this parameter. */
|
xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
|
xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
|
if (!memory_operand (xops[0], Pmode))
|
if (!memory_operand (xops[0], Pmode))
|
{
|
{
|
rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
|
rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
|
xops[0] = GEN_INT (vcall_offset);
|
xops[0] = GEN_INT (vcall_offset);
|
xops[1] = tmp2;
|
xops[1] = tmp2;
|
output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
|
output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
|
xops[0] = gen_rtx_MEM (Pmode, p2tmp);
|
xops[0] = gen_rtx_MEM (Pmode, p2tmp);
|
}
|
}
|
xops[2] = this;
|
xops[2] = this;
|
output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
|
output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
|
}
|
}
|
|
|
xops[0] = XEXP (DECL_RTL (function), 0);
|
xops[0] = XEXP (DECL_RTL (function), 0);
|
if (1 || !flag_pic || (*targetm.binds_local_p) (function))
|
if (1 || !flag_pic || (*targetm.binds_local_p) (function))
|
output_asm_insn ("jump.l\t%P0", xops);
|
output_asm_insn ("jump.l\t%P0", xops);
|
}
|
}
|
|
|
/* Codes for all the Blackfin builtins.  Order matters: these values are
   used as table indices elsewhere, so do not reorder.  */
enum bfin_builtins
{
  /* Synchronization.  */
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,

  /* Vector construction / extraction.  */
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  /* Paired 16-bit fractional operations.  */
  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  /* Scalar 16-bit fractional operations.  */
  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  /* Half-word differences.  */
  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  /* Scalar 32-bit fractional operations.  */
  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,

  /* Half-word by half-word multiplies to SImode.  */
  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  /* Shifts.  */
  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,

  /* Complex (packed 16-bit) arithmetic.  */
  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  /* Must be last: one past the highest builtin code.  */
  BFIN_BUILTIN_MAX
};

|
/* Register builtin NAME with function type TYPE and machine-specific
   code CODE via the language hook.  */
#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  lang_hooks.builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
			       NULL, NULL_TREE);			\
} while (0)

|
/* Set up all builtin functions for this target. */
|
/* Set up all builtin functions for this target. */
|
static void
|
static void
|
bfin_init_builtins (void)
|
bfin_init_builtins (void)
|
{
|
{
|
tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
|
tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
|
tree void_ftype_void
|
tree void_ftype_void
|
= build_function_type (void_type_node, void_list_node);
|
= build_function_type (void_type_node, void_list_node);
|
tree short_ftype_short
|
tree short_ftype_short
|
= build_function_type_list (short_integer_type_node, short_integer_type_node,
|
= build_function_type_list (short_integer_type_node, short_integer_type_node,
|
NULL_TREE);
|
NULL_TREE);
|
tree short_ftype_int_int
|
tree short_ftype_int_int
|
= build_function_type_list (short_integer_type_node, integer_type_node,
|
= build_function_type_list (short_integer_type_node, integer_type_node,
|
integer_type_node, NULL_TREE);
|
integer_type_node, NULL_TREE);
|
tree int_ftype_int_int
|
tree int_ftype_int_int
|
= build_function_type_list (integer_type_node, integer_type_node,
|
= build_function_type_list (integer_type_node, integer_type_node,
|
integer_type_node, NULL_TREE);
|
integer_type_node, NULL_TREE);
|
tree int_ftype_int
|
tree int_ftype_int
|
= build_function_type_list (integer_type_node, integer_type_node,
|
= build_function_type_list (integer_type_node, integer_type_node,
|
NULL_TREE);
|
NULL_TREE);
|
tree short_ftype_int
|
tree short_ftype_int
|
= build_function_type_list (short_integer_type_node, integer_type_node,
|
= build_function_type_list (short_integer_type_node, integer_type_node,
|
NULL_TREE);
|
NULL_TREE);
|
tree int_ftype_v2hi_v2hi
|
tree int_ftype_v2hi_v2hi
|
= build_function_type_list (integer_type_node, V2HI_type_node,
|
= build_function_type_list (integer_type_node, V2HI_type_node,
|
V2HI_type_node, NULL_TREE);
|
V2HI_type_node, NULL_TREE);
|
tree v2hi_ftype_v2hi_v2hi
|
tree v2hi_ftype_v2hi_v2hi
|
= build_function_type_list (V2HI_type_node, V2HI_type_node,
|
= build_function_type_list (V2HI_type_node, V2HI_type_node,
|
V2HI_type_node, NULL_TREE);
|
V2HI_type_node, NULL_TREE);
|
tree v2hi_ftype_v2hi_v2hi_v2hi
|
tree v2hi_ftype_v2hi_v2hi_v2hi
|
= build_function_type_list (V2HI_type_node, V2HI_type_node,
|
= build_function_type_list (V2HI_type_node, V2HI_type_node,
|
V2HI_type_node, V2HI_type_node, NULL_TREE);
|
V2HI_type_node, V2HI_type_node, NULL_TREE);
|
tree v2hi_ftype_int_int
|
tree v2hi_ftype_int_int
|
= build_function_type_list (V2HI_type_node, integer_type_node,
|
= build_function_type_list (V2HI_type_node, integer_type_node,
|
integer_type_node, NULL_TREE);
|
integer_type_node, NULL_TREE);
|
tree v2hi_ftype_v2hi_int
|
tree v2hi_ftype_v2hi_int
|
= build_function_type_list (V2HI_type_node, V2HI_type_node,
|
= build_function_type_list (V2HI_type_node, V2HI_type_node,
|
integer_type_node, NULL_TREE);
|
integer_type_node, NULL_TREE);
|
tree int_ftype_short_short
|
tree int_ftype_short_short
|
= build_function_type_list (integer_type_node, short_integer_type_node,
|
= build_function_type_list (integer_type_node, short_integer_type_node,
|
short_integer_type_node, NULL_TREE);
|
short_integer_type_node, NULL_TREE);
|
tree v2hi_ftype_v2hi
|
tree v2hi_ftype_v2hi
|
= build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
|
= build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
|
tree short_ftype_v2hi
|
tree short_ftype_v2hi
|
= build_function_type_list (short_integer_type_node, V2HI_type_node,
|
= build_function_type_list (short_integer_type_node, V2HI_type_node,
|
NULL_TREE);
|
NULL_TREE);
|
|
|
/* Add the remaining MMX insns with somewhat more complicated types. */
|
/* Add the remaining MMX insns with somewhat more complicated types. */
|
def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
|
def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
|
def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
|
def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
|
|
|
def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
|
def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
|
BFIN_BUILTIN_COMPOSE_2X16);
|
BFIN_BUILTIN_COMPOSE_2X16);
|
def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
|
def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
|
BFIN_BUILTIN_EXTRACTHI);
|
BFIN_BUILTIN_EXTRACTHI);
|
def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
|
def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
|
BFIN_BUILTIN_EXTRACTLO);
|
BFIN_BUILTIN_EXTRACTLO);
|
|
|
def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
|
def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
|
BFIN_BUILTIN_MIN_2X16);
|
BFIN_BUILTIN_MIN_2X16);
|
def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
|
def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
|
BFIN_BUILTIN_MAX_2X16);
|
BFIN_BUILTIN_MAX_2X16);
|
|
|
def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
|
def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
|
BFIN_BUILTIN_SSADD_2X16);
|
BFIN_BUILTIN_SSADD_2X16);
|
def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
|
def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
|
BFIN_BUILTIN_SSSUB_2X16);
|
BFIN_BUILTIN_SSSUB_2X16);
|
def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
|
def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
|
BFIN_BUILTIN_SSADDSUB_2X16);
|
BFIN_BUILTIN_SSADDSUB_2X16);
|
def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
|
def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
|
BFIN_BUILTIN_SSSUBADD_2X16);
|
BFIN_BUILTIN_SSSUBADD_2X16);
|
def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
|
def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
|
BFIN_BUILTIN_MULT_2X16);
|
BFIN_BUILTIN_MULT_2X16);
|
def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
|
def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
|
BFIN_BUILTIN_MULTR_2X16);
|
BFIN_BUILTIN_MULTR_2X16);
|
def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
|
def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
|
BFIN_BUILTIN_NEG_2X16);
|
BFIN_BUILTIN_NEG_2X16);
|
def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
|
def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
|
BFIN_BUILTIN_ABS_2X16);
|
BFIN_BUILTIN_ABS_2X16);
|
|
|
def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
|
def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
|
BFIN_BUILTIN_SSADD_1X16);
|
BFIN_BUILTIN_SSADD_1X16);
|
def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
|
def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
|
BFIN_BUILTIN_SSSUB_1X16);
|
BFIN_BUILTIN_SSSUB_1X16);
|
def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
|
def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
|
BFIN_BUILTIN_MULT_1X16);
|
BFIN_BUILTIN_MULT_1X16);
|
def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
|
def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
|
BFIN_BUILTIN_MULTR_1X16);
|
BFIN_BUILTIN_MULTR_1X16);
|
def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
|
def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
|
BFIN_BUILTIN_NEG_1X16);
|
BFIN_BUILTIN_NEG_1X16);
|
def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
|
def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
|
BFIN_BUILTIN_ABS_1X16);
|
BFIN_BUILTIN_ABS_1X16);
|
def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
|
def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
|
BFIN_BUILTIN_NORM_1X16);
|
BFIN_BUILTIN_NORM_1X16);
|
|
|
def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
|
def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
|
BFIN_BUILTIN_DIFFHL_2X16);
|
BFIN_BUILTIN_DIFFHL_2X16);
|
def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
|
def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
|
BFIN_BUILTIN_DIFFLH_2X16);
|
BFIN_BUILTIN_DIFFLH_2X16);
|
|
|
def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
|
def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
|
BFIN_BUILTIN_MULHISILL);
|
BFIN_BUILTIN_MULHISILL);
|
def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
|
def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
|
BFIN_BUILTIN_MULHISIHL);
|
BFIN_BUILTIN_MULHISIHL);
|
def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
|
def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
|
BFIN_BUILTIN_MULHISILH);
|
BFIN_BUILTIN_MULHISILH);
|
def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
|
def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
|
BFIN_BUILTIN_MULHISIHH);
|
BFIN_BUILTIN_MULHISIHH);
|
|
|
def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
|
def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
|
BFIN_BUILTIN_SSADD_1X32);
|
BFIN_BUILTIN_SSADD_1X32);
|
def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
|
def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
|
BFIN_BUILTIN_SSSUB_1X32);
|
BFIN_BUILTIN_SSSUB_1X32);
|
def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
|
def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
|
BFIN_BUILTIN_NEG_1X32);
|
BFIN_BUILTIN_NEG_1X32);
|
def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
|
def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
|
BFIN_BUILTIN_NORM_1X32);
|
BFIN_BUILTIN_NORM_1X32);
|
def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
|
def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
|
BFIN_BUILTIN_MULT_1X32);
|
BFIN_BUILTIN_MULT_1X32);
|
|
|
/* Shifts. */
|
/* Shifts. */
|
def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
|
def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
|
BFIN_BUILTIN_SSASHIFT_1X16);
|
BFIN_BUILTIN_SSASHIFT_1X16);
|
def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
|
def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
|
BFIN_BUILTIN_SSASHIFT_2X16);
|
BFIN_BUILTIN_SSASHIFT_2X16);
|
def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
|
def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
|
BFIN_BUILTIN_LSHIFT_1X16);
|
BFIN_BUILTIN_LSHIFT_1X16);
|
def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
|
def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
|
BFIN_BUILTIN_LSHIFT_2X16);
|
BFIN_BUILTIN_LSHIFT_2X16);
|
|
|
/* Complex numbers. */
|
/* Complex numbers. */
|
def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
|
def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
|
BFIN_BUILTIN_CPLX_MUL_16);
|
BFIN_BUILTIN_CPLX_MUL_16);
|
def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
|
def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
|
BFIN_BUILTIN_CPLX_MAC_16);
|
BFIN_BUILTIN_CPLX_MAC_16);
|
def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
|
def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
|
BFIN_BUILTIN_CPLX_MSU_16);
|
BFIN_BUILTIN_CPLX_MSU_16);
|
}
|
}
|
|
|
|
|
struct builtin_description
|
struct builtin_description
|
{
|
{
|
const enum insn_code icode;
|
const enum insn_code icode;
|
const char *const name;
|
const char *const name;
|
const enum bfin_builtins code;
|
const enum bfin_builtins code;
|
int macflag;
|
int macflag;
|
};
|
};
|
|
|
/* Two-operand builtins, expanded via bfin_expand_binop_builtin.  Entries
   with macflag == -1 are plain binary ops; the multiply entries pass a
   MACFLAG_xxx constant as an extra operand to the flag_mul* patterns.  */
static const struct builtin_description bdesc_2arg[] =
{
  { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },

  /* Shifts.  */
  { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
  { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
  { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
  { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },

  /* 16-bit fractional min/max/add/sub.  */
  { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
  { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
  { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
  { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },

  /* 32-bit fractional min/max/add/sub.  */
  { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
  { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
  { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
  { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },

  /* Paired 16-bit vector ops.  */
  { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
  { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
  { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
  { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
  { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
  { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },

  /* Multiplies; the last field selects rounding/truncation behavior.  */
  { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE }
};
|
|
|
/* One-operand builtins, expanded via bfin_expand_unop_builtin.  The
   macflag field is unused for unary ops and is always 0 here.  */
static const struct builtin_description bdesc_1arg[] =
{
  { CODE_FOR_signbitshi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
  { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
  { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },

  { CODE_FOR_signbitssi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
  { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },

  { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
  { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
  { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
  { CODE_FOR_absv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
};
|
|
|
/* Errors in the source file can cause expand_expr to return const0_rtx
|
/* Errors in the source file can cause expand_expr to return const0_rtx
|
where we expect a vector. To avoid crashing, use one of the vector
|
where we expect a vector. To avoid crashing, use one of the vector
|
clear instructions. */
|
clear instructions. */
|
static rtx
|
static rtx
|
safe_vector_operand (rtx x, enum machine_mode mode)
|
safe_vector_operand (rtx x, enum machine_mode mode)
|
{
|
{
|
if (x != const0_rtx)
|
if (x != const0_rtx)
|
return x;
|
return x;
|
x = gen_reg_rtx (SImode);
|
x = gen_reg_rtx (SImode);
|
|
|
emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
|
emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
|
return gen_lowpart (mode, x);
|
return gen_lowpart (mode, x);
|
}
|
}
|
|
|
/* Subroutine of bfin_expand_builtin to take care of binop insns.  MACFLAG is -1
   if this is a normal binary op, or one of the MACFLAG_xxx constants.
   ICODE is the insn pattern to emit, ARGLIST holds the two argument
   trees, and TARGET is a suggested destination (may be NULL or of the
   wrong mode).  Returns the rtx holding the result, or 0 if the
   pattern's generator refused the operands.  */

static rtx
bfin_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target,
			   int macflag)
{
  rtx pat;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode op1mode = GET_MODE (op1);
  /* Modes the insn pattern requires for its result and two inputs.  */
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* Guard against const0_rtx standing in for a vector (see
     safe_vector_operand).  */
  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);
  if (VECTOR_MODE_P (mode1))
    op1 = safe_vector_operand (op1, mode1);

  /* Use TARGET only if it exists and already suits the pattern.  */
  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  /* Arguments may arrive as SImode (promoted) or VOIDmode (constants)
     where the pattern wants HImode; narrow them with gen_lowpart.  */
  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
    {
      op1mode = HImode;
      op1 = gen_lowpart (HImode, op1);
    }
  /* In case the insn wants input operands in modes different from
     the result, abort.  */
  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
	      && (op1mode == mode1 || op1mode == VOIDmode));

  /* Force operands into registers if the pattern's predicates reject
     them as-is.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* Patterns with a macflag take it as an extra fourth operand.  */
  if (macflag == -1)
    pat = GEN_FCN (icode) (target, op0, op1);
  else
    pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
  if (! pat)
    return 0;

  emit_insn (pat);
  return target;
}
|
|
|
/* Subroutine of bfin_expand_builtin to take care of unop insns. */
|
/* Subroutine of bfin_expand_builtin to take care of unop insns. */
|
|
|
static rtx
|
static rtx
|
bfin_expand_unop_builtin (enum insn_code icode, tree arglist,
|
bfin_expand_unop_builtin (enum insn_code icode, tree arglist,
|
rtx target)
|
rtx target)
|
{
|
{
|
rtx pat;
|
rtx pat;
|
tree arg0 = TREE_VALUE (arglist);
|
tree arg0 = TREE_VALUE (arglist);
|
rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
|
rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
|
enum machine_mode op0mode = GET_MODE (op0);
|
enum machine_mode op0mode = GET_MODE (op0);
|
enum machine_mode tmode = insn_data[icode].operand[0].mode;
|
enum machine_mode tmode = insn_data[icode].operand[0].mode;
|
enum machine_mode mode0 = insn_data[icode].operand[1].mode;
|
enum machine_mode mode0 = insn_data[icode].operand[1].mode;
|
|
|
if (! target
|
if (! target
|
|| GET_MODE (target) != tmode
|
|| GET_MODE (target) != tmode
|
|| ! (*insn_data[icode].operand[0].predicate) (target, tmode))
|
|| ! (*insn_data[icode].operand[0].predicate) (target, tmode))
|
target = gen_reg_rtx (tmode);
|
target = gen_reg_rtx (tmode);
|
|
|
if (VECTOR_MODE_P (mode0))
|
if (VECTOR_MODE_P (mode0))
|
op0 = safe_vector_operand (op0, mode0);
|
op0 = safe_vector_operand (op0, mode0);
|
|
|
if (op0mode == SImode && mode0 == HImode)
|
if (op0mode == SImode && mode0 == HImode)
|
{
|
{
|
op0mode = HImode;
|
op0mode = HImode;
|
op0 = gen_lowpart (HImode, op0);
|
op0 = gen_lowpart (HImode, op0);
|
}
|
}
|
gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
|
gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
|
|
|
if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
|
if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
|
op0 = copy_to_mode_reg (mode0, op0);
|
op0 = copy_to_mode_reg (mode0, op0);
|
|
|
pat = GEN_FCN (icode) (target, op0);
|
pat = GEN_FCN (icode) (target, op0);
|
if (! pat)
|
if (! pat)
|
return 0;
|
return 0;
|
emit_insn (pat);
|
emit_insn (pat);
|
return target;
|
return target;
|
}
|
}
|
|
|
/* Expand an expression EXP that calls a built-in function,
|
/* Expand an expression EXP that calls a built-in function,
|
with result going to TARGET if that's convenient
|
with result going to TARGET if that's convenient
|
(and in mode MODE if that's convenient).
|
(and in mode MODE if that's convenient).
|
SUBTARGET may be used as the target for computing one of EXP's operands.
|
SUBTARGET may be used as the target for computing one of EXP's operands.
|
IGNORE is nonzero if the value is to be ignored. */
|
IGNORE is nonzero if the value is to be ignored. */
|
|
|
static rtx
|
static rtx
|
bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
|
bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
|
rtx subtarget ATTRIBUTE_UNUSED,
|
rtx subtarget ATTRIBUTE_UNUSED,
|
enum machine_mode mode ATTRIBUTE_UNUSED,
|
enum machine_mode mode ATTRIBUTE_UNUSED,
|
int ignore ATTRIBUTE_UNUSED)
|
int ignore ATTRIBUTE_UNUSED)
|
{
|
{
|
size_t i;
|
size_t i;
|
enum insn_code icode;
|
enum insn_code icode;
|
const struct builtin_description *d;
|
const struct builtin_description *d;
|
tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
|
tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
|
tree arglist = TREE_OPERAND (exp, 1);
|
tree arglist = TREE_OPERAND (exp, 1);
|
unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
|
unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
|
tree arg0, arg1, arg2;
|
tree arg0, arg1, arg2;
|
rtx op0, op1, op2, accvec, pat, tmp1, tmp2;
|
rtx op0, op1, op2, accvec, pat, tmp1, tmp2;
|
enum machine_mode tmode, mode0;
|
enum machine_mode tmode, mode0;
|
|
|
switch (fcode)
|
switch (fcode)
|
{
|
{
|
case BFIN_BUILTIN_CSYNC:
|
case BFIN_BUILTIN_CSYNC:
|
emit_insn (gen_csync ());
|
emit_insn (gen_csync ());
|
return 0;
|
return 0;
|
case BFIN_BUILTIN_SSYNC:
|
case BFIN_BUILTIN_SSYNC:
|
emit_insn (gen_ssync ());
|
emit_insn (gen_ssync ());
|
return 0;
|
return 0;
|
|
|
case BFIN_BUILTIN_DIFFHL_2X16:
|
case BFIN_BUILTIN_DIFFHL_2X16:
|
case BFIN_BUILTIN_DIFFLH_2X16:
|
case BFIN_BUILTIN_DIFFLH_2X16:
|
arg0 = TREE_VALUE (arglist);
|
arg0 = TREE_VALUE (arglist);
|
op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
|
op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
|
icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16
|
icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16
|
? CODE_FOR_subhilov2hi3 : CODE_FOR_sublohiv2hi3);
|
? CODE_FOR_subhilov2hi3 : CODE_FOR_sublohiv2hi3);
|
tmode = insn_data[icode].operand[0].mode;
|
tmode = insn_data[icode].operand[0].mode;
|
mode0 = insn_data[icode].operand[1].mode;
|
mode0 = insn_data[icode].operand[1].mode;
|
|
|
if (! target
|
if (! target
|
|| GET_MODE (target) != tmode
|
|| GET_MODE (target) != tmode
|
|| ! (*insn_data[icode].operand[0].predicate) (target, tmode))
|
|| ! (*insn_data[icode].operand[0].predicate) (target, tmode))
|
target = gen_reg_rtx (tmode);
|
target = gen_reg_rtx (tmode);
|
|
|
if (VECTOR_MODE_P (mode0))
|
if (VECTOR_MODE_P (mode0))
|
op0 = safe_vector_operand (op0, mode0);
|
op0 = safe_vector_operand (op0, mode0);
|
|
|
if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
|
if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
|
op0 = copy_to_mode_reg (mode0, op0);
|
op0 = copy_to_mode_reg (mode0, op0);
|
|
|
pat = GEN_FCN (icode) (target, op0, op0);
|
pat = GEN_FCN (icode) (target, op0, op0);
|
if (! pat)
|
if (! pat)
|
return 0;
|
return 0;
|
emit_insn (pat);
|
emit_insn (pat);
|
return target;
|
return target;
|
|
|
case BFIN_BUILTIN_CPLX_MUL_16:
|
case BFIN_BUILTIN_CPLX_MUL_16:
|
arg0 = TREE_VALUE (arglist);
|
arg0 = TREE_VALUE (arglist);
|
arg1 = TREE_VALUE (TREE_CHAIN (arglist));
|
arg1 = TREE_VALUE (TREE_CHAIN (arglist));
|
op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
|
op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
|
op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
|
op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
|
accvec = gen_reg_rtx (V2PDImode);
|
accvec = gen_reg_rtx (V2PDImode);
|
|
|
if (! target
|
if (! target
|
|| GET_MODE (target) != V2HImode
|
|| GET_MODE (target) != V2HImode
|
|| ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
|
|| ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
|
target = gen_reg_rtx (tmode);
|
target = gen_reg_rtx (tmode);
|
if (! register_operand (op0, GET_MODE (op0)))
|
if (! register_operand (op0, GET_MODE (op0)))
|
op0 = copy_to_mode_reg (GET_MODE (op0), op0);
|
op0 = copy_to_mode_reg (GET_MODE (op0), op0);
|
if (! register_operand (op1, GET_MODE (op1)))
|
if (! register_operand (op1, GET_MODE (op1)))
|
op1 = copy_to_mode_reg (GET_MODE (op1), op1);
|
op1 = copy_to_mode_reg (GET_MODE (op1), op1);
|
|
|
emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
|
emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
|
const0_rtx, const0_rtx,
|
const0_rtx, const0_rtx,
|
const1_rtx, GEN_INT (MACFLAG_NONE)));
|
const1_rtx, GEN_INT (MACFLAG_NONE)));
|
emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
|
emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
|
const1_rtx, const1_rtx,
|
const1_rtx, const1_rtx,
|
const0_rtx, accvec, const1_rtx, const0_rtx,
|
const0_rtx, accvec, const1_rtx, const0_rtx,
|
GEN_INT (MACFLAG_NONE), accvec));
|
GEN_INT (MACFLAG_NONE), accvec));
|
|
|
return target;
|
return target;
|
|
|
case BFIN_BUILTIN_CPLX_MAC_16:
|
case BFIN_BUILTIN_CPLX_MAC_16:
|
case BFIN_BUILTIN_CPLX_MSU_16:
|
case BFIN_BUILTIN_CPLX_MSU_16:
|
arg0 = TREE_VALUE (arglist);
|
arg0 = TREE_VALUE (arglist);
|
arg1 = TREE_VALUE (TREE_CHAIN (arglist));
|
arg1 = TREE_VALUE (TREE_CHAIN (arglist));
|
arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
|
arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
|
op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
|
op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
|
op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
|
op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
|
op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
|
op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
|
accvec = gen_reg_rtx (V2PDImode);
|
accvec = gen_reg_rtx (V2PDImode);
|
|
|
if (! target
|
if (! target
|
|| GET_MODE (target) != V2HImode
|
|| GET_MODE (target) != V2HImode
|
|| ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
|
|| ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
|
target = gen_reg_rtx (tmode);
|
target = gen_reg_rtx (tmode);
|
if (! register_operand (op0, GET_MODE (op0)))
|
if (! register_operand (op0, GET_MODE (op0)))
|
op0 = copy_to_mode_reg (GET_MODE (op0), op0);
|
op0 = copy_to_mode_reg (GET_MODE (op0), op0);
|
if (! register_operand (op1, GET_MODE (op1)))
|
if (! register_operand (op1, GET_MODE (op1)))
|
op1 = copy_to_mode_reg (GET_MODE (op1), op1);
|
op1 = copy_to_mode_reg (GET_MODE (op1), op1);
|
|
|
tmp1 = gen_reg_rtx (SImode);
|
tmp1 = gen_reg_rtx (SImode);
|
tmp2 = gen_reg_rtx (SImode);
|
tmp2 = gen_reg_rtx (SImode);
|
emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op2), GEN_INT (16)));
|
emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op2), GEN_INT (16)));
|
emit_move_insn (tmp2, gen_lowpart (SImode, op2));
|
emit_move_insn (tmp2, gen_lowpart (SImode, op2));
|
emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
|
emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
|
emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
|
emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
|
emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op0, op1, const0_rtx,
|
emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op0, op1, const0_rtx,
|
const0_rtx, const0_rtx,
|
const0_rtx, const0_rtx,
|
const1_rtx, accvec, const0_rtx,
|
const1_rtx, accvec, const0_rtx,
|
const0_rtx,
|
const0_rtx,
|
GEN_INT (MACFLAG_W32)));
|
GEN_INT (MACFLAG_W32)));
|
tmp1 = (fcode == BFIN_BUILTIN_CPLX_MAC_16 ? const1_rtx : const0_rtx);
|
tmp1 = (fcode == BFIN_BUILTIN_CPLX_MAC_16 ? const1_rtx : const0_rtx);
|
tmp2 = (fcode == BFIN_BUILTIN_CPLX_MAC_16 ? const0_rtx : const1_rtx);
|
tmp2 = (fcode == BFIN_BUILTIN_CPLX_MAC_16 ? const0_rtx : const1_rtx);
|
emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
|
emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
|
const1_rtx, const1_rtx,
|
const1_rtx, const1_rtx,
|
const0_rtx, accvec, tmp1, tmp2,
|
const0_rtx, accvec, tmp1, tmp2,
|
GEN_INT (MACFLAG_NONE), accvec));
|
GEN_INT (MACFLAG_NONE), accvec));
|
|
|
return target;
|
return target;
|
|
|
default:
|
default:
|
break;
|
break;
|
}
|
}
|
|
|
for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
|
for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
|
if (d->code == fcode)
|
if (d->code == fcode)
|
return bfin_expand_binop_builtin (d->icode, arglist, target,
|
return bfin_expand_binop_builtin (d->icode, arglist, target,
|
d->macflag);
|
d->macflag);
|
|
|
for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
|
for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
|
if (d->code == fcode)
|
if (d->code == fcode)
|
return bfin_expand_unop_builtin (d->icode, arglist, target);
|
return bfin_expand_unop_builtin (d->icode, arglist, target);
|
|
|
gcc_unreachable ();
|
gcc_unreachable ();
|
}
|
}
|
|
|
/* Initialize the GCC target structure: point each target hook at the
   Blackfin implementation defined in this file (or at a generic hook),
   then instantiate targetm from the resulting macro set.  */

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label 

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL bfin_internal_label

#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER bfin_assemble_integer

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

/* Promote small integer arguments and return values to int mode.  */
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION bfin_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD bfin_secondary_reload

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address

struct gcc_target targetm = TARGET_INITIALIZER;
|
|
|