/* Subroutines for assembler code output on the TMS320C[34]x
   Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2003,
   2004, 2005, 2007
   Free Software Foundation, Inc.

   Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
   and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

/* Some output-actions in c4x.md need these.  */
|
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "real.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "conditions.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "flags.h"
#include "recog.h"
#include "ggc.h"
#include "cpplib.h"
#include "toplev.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
|
|
|
/* Library call handles for operations the C[34]x cannot open-code;
   initialized in c4x_init_libfuncs.  */
rtx smulhi3_libfunc;
rtx umulhi3_libfunc;
rtx fix_truncqfhi2_libfunc;
rtx fixuns_truncqfhi2_libfunc;
rtx fix_trunchfhi2_libfunc;
rtx fixuns_trunchfhi2_libfunc;
rtx floathiqf2_libfunc;
rtx floatunshiqf2_libfunc;
rtx floathihf2_libfunc;
rtx floatunshihf2_libfunc;
|
|
|
static int c4x_leaf_function;
|
static int c4x_leaf_function;
|
|
|
static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
|
static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;
|
|
|
/* Array of the smallest class containing reg number REGNO, indexed by
|
/* Array of the smallest class containing reg number REGNO, indexed by
|
REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
|
REGNO. Used by REGNO_REG_CLASS in c4x.h. We assume that all these
|
registers are available and set the class to NO_REGS for registers
|
registers are available and set the class to NO_REGS for registers
|
that the target switches say are unavailable. */
|
that the target switches say are unavailable. */
|
|
|
enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
|
enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
|
{
|
{
|
/* Reg Modes Saved. */
|
/* Reg Modes Saved. */
|
R0R1_REGS, /* R0 QI, QF, HF No. */
|
R0R1_REGS, /* R0 QI, QF, HF No. */
|
R0R1_REGS, /* R1 QI, QF, HF No. */
|
R0R1_REGS, /* R1 QI, QF, HF No. */
|
R2R3_REGS, /* R2 QI, QF, HF No. */
|
R2R3_REGS, /* R2 QI, QF, HF No. */
|
R2R3_REGS, /* R3 QI, QF, HF No. */
|
R2R3_REGS, /* R3 QI, QF, HF No. */
|
EXT_LOW_REGS, /* R4 QI, QF, HF QI. */
|
EXT_LOW_REGS, /* R4 QI, QF, HF QI. */
|
EXT_LOW_REGS, /* R5 QI, QF, HF QI. */
|
EXT_LOW_REGS, /* R5 QI, QF, HF QI. */
|
EXT_LOW_REGS, /* R6 QI, QF, HF QF. */
|
EXT_LOW_REGS, /* R6 QI, QF, HF QF. */
|
EXT_LOW_REGS, /* R7 QI, QF, HF QF. */
|
EXT_LOW_REGS, /* R7 QI, QF, HF QF. */
|
ADDR_REGS, /* AR0 QI No. */
|
ADDR_REGS, /* AR0 QI No. */
|
ADDR_REGS, /* AR1 QI No. */
|
ADDR_REGS, /* AR1 QI No. */
|
ADDR_REGS, /* AR2 QI No. */
|
ADDR_REGS, /* AR2 QI No. */
|
ADDR_REGS, /* AR3 QI QI. */
|
ADDR_REGS, /* AR3 QI QI. */
|
ADDR_REGS, /* AR4 QI QI. */
|
ADDR_REGS, /* AR4 QI QI. */
|
ADDR_REGS, /* AR5 QI QI. */
|
ADDR_REGS, /* AR5 QI QI. */
|
ADDR_REGS, /* AR6 QI QI. */
|
ADDR_REGS, /* AR6 QI QI. */
|
ADDR_REGS, /* AR7 QI QI. */
|
ADDR_REGS, /* AR7 QI QI. */
|
DP_REG, /* DP QI No. */
|
DP_REG, /* DP QI No. */
|
INDEX_REGS, /* IR0 QI No. */
|
INDEX_REGS, /* IR0 QI No. */
|
INDEX_REGS, /* IR1 QI No. */
|
INDEX_REGS, /* IR1 QI No. */
|
BK_REG, /* BK QI QI. */
|
BK_REG, /* BK QI QI. */
|
SP_REG, /* SP QI No. */
|
SP_REG, /* SP QI No. */
|
ST_REG, /* ST CC No. */
|
ST_REG, /* ST CC No. */
|
NO_REGS, /* DIE/IE No. */
|
NO_REGS, /* DIE/IE No. */
|
NO_REGS, /* IIE/IF No. */
|
NO_REGS, /* IIE/IF No. */
|
NO_REGS, /* IIF/IOF No. */
|
NO_REGS, /* IIF/IOF No. */
|
INT_REGS, /* RS QI No. */
|
INT_REGS, /* RS QI No. */
|
INT_REGS, /* RE QI No. */
|
INT_REGS, /* RE QI No. */
|
RC_REG, /* RC QI No. */
|
RC_REG, /* RC QI No. */
|
EXT_REGS, /* R8 QI, QF, HF QI. */
|
EXT_REGS, /* R8 QI, QF, HF QI. */
|
EXT_REGS, /* R9 QI, QF, HF No. */
|
EXT_REGS, /* R9 QI, QF, HF No. */
|
EXT_REGS, /* R10 QI, QF, HF No. */
|
EXT_REGS, /* R10 QI, QF, HF No. */
|
EXT_REGS, /* R11 QI, QF, HF No. */
|
EXT_REGS, /* R11 QI, QF, HF No. */
|
};
|
};
|
|
|
enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
|
enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
|
{
|
{
|
/* Reg Modes Saved. */
|
/* Reg Modes Saved. */
|
HFmode, /* R0 QI, QF, HF No. */
|
HFmode, /* R0 QI, QF, HF No. */
|
HFmode, /* R1 QI, QF, HF No. */
|
HFmode, /* R1 QI, QF, HF No. */
|
HFmode, /* R2 QI, QF, HF No. */
|
HFmode, /* R2 QI, QF, HF No. */
|
HFmode, /* R3 QI, QF, HF No. */
|
HFmode, /* R3 QI, QF, HF No. */
|
QFmode, /* R4 QI, QF, HF QI. */
|
QFmode, /* R4 QI, QF, HF QI. */
|
QFmode, /* R5 QI, QF, HF QI. */
|
QFmode, /* R5 QI, QF, HF QI. */
|
QImode, /* R6 QI, QF, HF QF. */
|
QImode, /* R6 QI, QF, HF QF. */
|
QImode, /* R7 QI, QF, HF QF. */
|
QImode, /* R7 QI, QF, HF QF. */
|
QImode, /* AR0 QI No. */
|
QImode, /* AR0 QI No. */
|
QImode, /* AR1 QI No. */
|
QImode, /* AR1 QI No. */
|
QImode, /* AR2 QI No. */
|
QImode, /* AR2 QI No. */
|
QImode, /* AR3 QI QI. */
|
QImode, /* AR3 QI QI. */
|
QImode, /* AR4 QI QI. */
|
QImode, /* AR4 QI QI. */
|
QImode, /* AR5 QI QI. */
|
QImode, /* AR5 QI QI. */
|
QImode, /* AR6 QI QI. */
|
QImode, /* AR6 QI QI. */
|
QImode, /* AR7 QI QI. */
|
QImode, /* AR7 QI QI. */
|
VOIDmode, /* DP QI No. */
|
VOIDmode, /* DP QI No. */
|
QImode, /* IR0 QI No. */
|
QImode, /* IR0 QI No. */
|
QImode, /* IR1 QI No. */
|
QImode, /* IR1 QI No. */
|
QImode, /* BK QI QI. */
|
QImode, /* BK QI QI. */
|
VOIDmode, /* SP QI No. */
|
VOIDmode, /* SP QI No. */
|
VOIDmode, /* ST CC No. */
|
VOIDmode, /* ST CC No. */
|
VOIDmode, /* DIE/IE No. */
|
VOIDmode, /* DIE/IE No. */
|
VOIDmode, /* IIE/IF No. */
|
VOIDmode, /* IIE/IF No. */
|
VOIDmode, /* IIF/IOF No. */
|
VOIDmode, /* IIF/IOF No. */
|
QImode, /* RS QI No. */
|
QImode, /* RS QI No. */
|
QImode, /* RE QI No. */
|
QImode, /* RE QI No. */
|
VOIDmode, /* RC QI No. */
|
VOIDmode, /* RC QI No. */
|
QFmode, /* R8 QI, QF, HF QI. */
|
QFmode, /* R8 QI, QF, HF QI. */
|
HFmode, /* R9 QI, QF, HF No. */
|
HFmode, /* R9 QI, QF, HF No. */
|
HFmode, /* R10 QI, QF, HF No. */
|
HFmode, /* R10 QI, QF, HF No. */
|
HFmode, /* R11 QI, QF, HF No. */
|
HFmode, /* R11 QI, QF, HF No. */
|
};
|
};
|
|
|
|
|
/* Test and compare insns in c4x.md store the information needed to
|
/* Test and compare insns in c4x.md store the information needed to
|
generate branch and scc insns here. */
|
generate branch and scc insns here. */
|
|
|
rtx c4x_compare_op0;
|
rtx c4x_compare_op0;
|
rtx c4x_compare_op1;
|
rtx c4x_compare_op1;
|
|
|
int c4x_cpu_version = 40; /* CPU version C30/31/32/33/40/44. */
|
int c4x_cpu_version = 40; /* CPU version C30/31/32/33/40/44. */
|
|
|
/* Pragma definitions. */
|
/* Pragma definitions. */
|
|
|
tree code_tree = NULL_TREE;
|
tree code_tree = NULL_TREE;
|
tree data_tree = NULL_TREE;
|
tree data_tree = NULL_TREE;
|
tree pure_tree = NULL_TREE;
|
tree pure_tree = NULL_TREE;
|
tree noreturn_tree = NULL_TREE;
|
tree noreturn_tree = NULL_TREE;
|
tree interrupt_tree = NULL_TREE;
|
tree interrupt_tree = NULL_TREE;
|
tree naked_tree = NULL_TREE;
|
tree naked_tree = NULL_TREE;
|
|
|
/* Forward declarations */
|
/* Forward declarations */
|
static bool c4x_handle_option (size_t, const char *, int);
|
static bool c4x_handle_option (size_t, const char *, int);
|
static int c4x_isr_reg_used_p (unsigned int);
|
static int c4x_isr_reg_used_p (unsigned int);
|
static int c4x_leaf_function_p (void);
|
static int c4x_leaf_function_p (void);
|
static int c4x_naked_function_p (void);
|
static int c4x_naked_function_p (void);
|
static int c4x_immed_int_constant (rtx);
|
static int c4x_immed_int_constant (rtx);
|
static int c4x_immed_float_constant (rtx);
|
static int c4x_immed_float_constant (rtx);
|
static int c4x_R_indirect (rtx);
|
static int c4x_R_indirect (rtx);
|
static void c4x_S_address_parse (rtx , int *, int *, int *, int *);
|
static void c4x_S_address_parse (rtx , int *, int *, int *, int *);
|
static int c4x_valid_operands (enum rtx_code, rtx *, enum machine_mode, int);
|
static int c4x_valid_operands (enum rtx_code, rtx *, enum machine_mode, int);
|
static int c4x_arn_reg_operand (rtx, enum machine_mode, unsigned int);
|
static int c4x_arn_reg_operand (rtx, enum machine_mode, unsigned int);
|
static int c4x_arn_mem_operand (rtx, enum machine_mode, unsigned int);
|
static int c4x_arn_mem_operand (rtx, enum machine_mode, unsigned int);
|
static void c4x_file_start (void);
|
static void c4x_file_start (void);
|
static void c4x_file_end (void);
|
static void c4x_file_end (void);
|
static void c4x_check_attribute (const char *, tree, tree, tree *);
|
static void c4x_check_attribute (const char *, tree, tree, tree *);
|
static int c4x_r11_set_p (rtx);
|
static int c4x_r11_set_p (rtx);
|
static int c4x_rptb_valid_p (rtx, rtx);
|
static int c4x_rptb_valid_p (rtx, rtx);
|
static void c4x_reorg (void);
|
static void c4x_reorg (void);
|
static int c4x_label_ref_used_p (rtx, rtx);
|
static int c4x_label_ref_used_p (rtx, rtx);
|
static tree c4x_handle_fntype_attribute (tree *, tree, tree, int, bool *);
|
static tree c4x_handle_fntype_attribute (tree *, tree, tree, int, bool *);
|
const struct attribute_spec c4x_attribute_table[];
|
const struct attribute_spec c4x_attribute_table[];
|
static void c4x_insert_attributes (tree, tree *);
|
static void c4x_insert_attributes (tree, tree *);
|
static void c4x_asm_named_section (const char *, unsigned int, tree);
|
static void c4x_asm_named_section (const char *, unsigned int, tree);
|
static int c4x_adjust_cost (rtx, rtx, rtx, int);
|
static int c4x_adjust_cost (rtx, rtx, rtx, int);
|
static void c4x_globalize_label (FILE *, const char *);
|
static void c4x_globalize_label (FILE *, const char *);
|
static bool c4x_rtx_costs (rtx, int, int, int *);
|
static bool c4x_rtx_costs (rtx, int, int, int *);
|
static int c4x_address_cost (rtx);
|
static int c4x_address_cost (rtx);
|
static void c4x_init_libfuncs (void);
|
static void c4x_init_libfuncs (void);
|
static void c4x_external_libcall (rtx);
|
static void c4x_external_libcall (rtx);
|
static rtx c4x_struct_value_rtx (tree, int);
|
static rtx c4x_struct_value_rtx (tree, int);
|
static tree c4x_gimplify_va_arg_expr (tree, tree, tree *, tree *);
|
static tree c4x_gimplify_va_arg_expr (tree, tree, tree *, tree *);
|
|
|
/* Initialize the GCC target structure. */
|
/* Initialize the GCC target structure. */
|
#undef TARGET_ASM_BYTE_OP
|
#undef TARGET_ASM_BYTE_OP
|
#define TARGET_ASM_BYTE_OP "\t.word\t"
|
#define TARGET_ASM_BYTE_OP "\t.word\t"
|
#undef TARGET_ASM_ALIGNED_HI_OP
|
#undef TARGET_ASM_ALIGNED_HI_OP
|
#define TARGET_ASM_ALIGNED_HI_OP NULL
|
#define TARGET_ASM_ALIGNED_HI_OP NULL
|
#undef TARGET_ASM_ALIGNED_SI_OP
|
#undef TARGET_ASM_ALIGNED_SI_OP
|
#define TARGET_ASM_ALIGNED_SI_OP NULL
|
#define TARGET_ASM_ALIGNED_SI_OP NULL
|
#undef TARGET_ASM_FILE_START
|
#undef TARGET_ASM_FILE_START
|
#define TARGET_ASM_FILE_START c4x_file_start
|
#define TARGET_ASM_FILE_START c4x_file_start
|
#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
|
#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
|
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
|
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
|
#undef TARGET_ASM_FILE_END
|
#undef TARGET_ASM_FILE_END
|
#define TARGET_ASM_FILE_END c4x_file_end
|
#define TARGET_ASM_FILE_END c4x_file_end
|
|
|
#undef TARGET_ASM_EXTERNAL_LIBCALL
|
#undef TARGET_ASM_EXTERNAL_LIBCALL
|
#define TARGET_ASM_EXTERNAL_LIBCALL c4x_external_libcall
|
#define TARGET_ASM_EXTERNAL_LIBCALL c4x_external_libcall
|
|
|
/* Play safe, not the fastest code. */
|
/* Play safe, not the fastest code. */
|
#undef TARGET_DEFAULT_TARGET_FLAGS
|
#undef TARGET_DEFAULT_TARGET_FLAGS
|
#define TARGET_DEFAULT_TARGET_FLAGS (MASK_ALIASES | MASK_PARALLEL \
|
#define TARGET_DEFAULT_TARGET_FLAGS (MASK_ALIASES | MASK_PARALLEL \
|
| MASK_PARALLEL_MPY | MASK_RPTB)
|
| MASK_PARALLEL_MPY | MASK_RPTB)
|
#undef TARGET_HANDLE_OPTION
|
#undef TARGET_HANDLE_OPTION
|
#define TARGET_HANDLE_OPTION c4x_handle_option
|
#define TARGET_HANDLE_OPTION c4x_handle_option
|
|
|
#undef TARGET_ATTRIBUTE_TABLE
|
#undef TARGET_ATTRIBUTE_TABLE
|
#define TARGET_ATTRIBUTE_TABLE c4x_attribute_table
|
#define TARGET_ATTRIBUTE_TABLE c4x_attribute_table
|
|
|
#undef TARGET_INSERT_ATTRIBUTES
|
#undef TARGET_INSERT_ATTRIBUTES
|
#define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes
|
#define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes
|
|
|
#undef TARGET_INIT_BUILTINS
|
#undef TARGET_INIT_BUILTINS
|
#define TARGET_INIT_BUILTINS c4x_init_builtins
|
#define TARGET_INIT_BUILTINS c4x_init_builtins
|
|
|
#undef TARGET_EXPAND_BUILTIN
|
#undef TARGET_EXPAND_BUILTIN
|
#define TARGET_EXPAND_BUILTIN c4x_expand_builtin
|
#define TARGET_EXPAND_BUILTIN c4x_expand_builtin
|
|
|
#undef TARGET_SCHED_ADJUST_COST
|
#undef TARGET_SCHED_ADJUST_COST
|
#define TARGET_SCHED_ADJUST_COST c4x_adjust_cost
|
#define TARGET_SCHED_ADJUST_COST c4x_adjust_cost
|
|
|
#undef TARGET_ASM_GLOBALIZE_LABEL
|
#undef TARGET_ASM_GLOBALIZE_LABEL
|
#define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label
|
#define TARGET_ASM_GLOBALIZE_LABEL c4x_globalize_label
|
|
|
#undef TARGET_RTX_COSTS
|
#undef TARGET_RTX_COSTS
|
#define TARGET_RTX_COSTS c4x_rtx_costs
|
#define TARGET_RTX_COSTS c4x_rtx_costs
|
#undef TARGET_ADDRESS_COST
|
#undef TARGET_ADDRESS_COST
|
#define TARGET_ADDRESS_COST c4x_address_cost
|
#define TARGET_ADDRESS_COST c4x_address_cost
|
|
|
#undef TARGET_MACHINE_DEPENDENT_REORG
|
#undef TARGET_MACHINE_DEPENDENT_REORG
|
#define TARGET_MACHINE_DEPENDENT_REORG c4x_reorg
|
#define TARGET_MACHINE_DEPENDENT_REORG c4x_reorg
|
|
|
#undef TARGET_INIT_LIBFUNCS
|
#undef TARGET_INIT_LIBFUNCS
|
#define TARGET_INIT_LIBFUNCS c4x_init_libfuncs
|
#define TARGET_INIT_LIBFUNCS c4x_init_libfuncs
|
|
|
#undef TARGET_STRUCT_VALUE_RTX
|
#undef TARGET_STRUCT_VALUE_RTX
|
#define TARGET_STRUCT_VALUE_RTX c4x_struct_value_rtx
|
#define TARGET_STRUCT_VALUE_RTX c4x_struct_value_rtx
|
|
|
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
|
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
|
#define TARGET_GIMPLIFY_VA_ARG_EXPR c4x_gimplify_va_arg_expr
|
#define TARGET_GIMPLIFY_VA_ARG_EXPR c4x_gimplify_va_arg_expr
|
|
|
struct gcc_target targetm = TARGET_INITIALIZER;
|
struct gcc_target targetm = TARGET_INITIALIZER;
|
|
|
/* Implement TARGET_HANDLE_OPTION. */
|
/* Implement TARGET_HANDLE_OPTION. */
|
|
|
static bool
|
static bool
|
c4x_handle_option (size_t code, const char *arg, int value)
|
c4x_handle_option (size_t code, const char *arg, int value)
|
{
|
{
|
switch (code)
|
switch (code)
|
{
|
{
|
case OPT_m30: c4x_cpu_version = 30; return true;
|
case OPT_m30: c4x_cpu_version = 30; return true;
|
case OPT_m31: c4x_cpu_version = 31; return true;
|
case OPT_m31: c4x_cpu_version = 31; return true;
|
case OPT_m32: c4x_cpu_version = 32; return true;
|
case OPT_m32: c4x_cpu_version = 32; return true;
|
case OPT_m33: c4x_cpu_version = 33; return true;
|
case OPT_m33: c4x_cpu_version = 33; return true;
|
case OPT_m40: c4x_cpu_version = 40; return true;
|
case OPT_m40: c4x_cpu_version = 40; return true;
|
case OPT_m44: c4x_cpu_version = 44; return true;
|
case OPT_m44: c4x_cpu_version = 44; return true;
|
|
|
case OPT_mcpu_:
|
case OPT_mcpu_:
|
if (arg[0] == 'c' || arg[0] == 'C')
|
if (arg[0] == 'c' || arg[0] == 'C')
|
arg++;
|
arg++;
|
value = atoi (arg);
|
value = atoi (arg);
|
switch (value)
|
switch (value)
|
{
|
{
|
case 30: case 31: case 32: case 33: case 40: case 44:
|
case 30: case 31: case 32: case 33: case 40: case 44:
|
c4x_cpu_version = value;
|
c4x_cpu_version = value;
|
return true;
|
return true;
|
}
|
}
|
return false;
|
return false;
|
|
|
default:
|
default:
|
return true;
|
return true;
|
}
|
}
|
}
|
}
|
|
|
/* Override command line options.
|
/* Override command line options.
|
Called once after all options have been parsed.
|
Called once after all options have been parsed.
|
Mostly we process the processor
|
Mostly we process the processor
|
type and sometimes adjust other TARGET_ options. */
|
type and sometimes adjust other TARGET_ options. */
|
|
|
void
|
void
|
c4x_override_options (void)
|
c4x_override_options (void)
|
{
|
{
|
/* Convert foo / 8.0 into foo * 0.125, etc. */
|
/* Convert foo / 8.0 into foo * 0.125, etc. */
|
set_fast_math_flags (1);
|
set_fast_math_flags (1);
|
|
|
/* We should phase out the following at some stage.
|
/* We should phase out the following at some stage.
|
This provides compatibility with the old -mno-aliases option. */
|
This provides compatibility with the old -mno-aliases option. */
|
if (! TARGET_ALIASES && ! flag_argument_noalias)
|
if (! TARGET_ALIASES && ! flag_argument_noalias)
|
flag_argument_noalias = 1;
|
flag_argument_noalias = 1;
|
|
|
if (!TARGET_C3X)
|
if (!TARGET_C3X)
|
target_flags |= MASK_MPYI | MASK_DB;
|
target_flags |= MASK_MPYI | MASK_DB;
|
|
|
if (optimize < 2)
|
if (optimize < 2)
|
target_flags &= ~(MASK_RPTB | MASK_PARALLEL);
|
target_flags &= ~(MASK_RPTB | MASK_PARALLEL);
|
|
|
if (!TARGET_PARALLEL)
|
if (!TARGET_PARALLEL)
|
target_flags &= ~MASK_PARALLEL_MPY;
|
target_flags &= ~MASK_PARALLEL_MPY;
|
}
|
}
|
|
|
|
|
/* This is called before c4x_override_options. */
|
/* This is called before c4x_override_options. */
|
|
|
void
|
void
|
c4x_optimization_options (int level ATTRIBUTE_UNUSED,
|
c4x_optimization_options (int level ATTRIBUTE_UNUSED,
|
int size ATTRIBUTE_UNUSED)
|
int size ATTRIBUTE_UNUSED)
|
{
|
{
|
/* Scheduling before register allocation can screw up global
|
/* Scheduling before register allocation can screw up global
|
register allocation, especially for functions that use MPY||ADD
|
register allocation, especially for functions that use MPY||ADD
|
instructions. The benefit we gain we get by scheduling before
|
instructions. The benefit we gain we get by scheduling before
|
register allocation is probably marginal anyhow. */
|
register allocation is probably marginal anyhow. */
|
flag_schedule_insns = 0;
|
flag_schedule_insns = 0;
|
}
|
}
|
|
|
|
|
/* Write an ASCII string. */
|
/* Write an ASCII string. */
|
|
|
#define C4X_ASCII_LIMIT 40
|
#define C4X_ASCII_LIMIT 40
|
|
|
void
|
void
|
c4x_output_ascii (FILE *stream, const char *ptr, int len)
|
c4x_output_ascii (FILE *stream, const char *ptr, int len)
|
{
|
{
|
char sbuf[C4X_ASCII_LIMIT + 1];
|
char sbuf[C4X_ASCII_LIMIT + 1];
|
int s, l, special, first = 1, onlys;
|
int s, l, special, first = 1, onlys;
|
|
|
if (len)
|
if (len)
|
fprintf (stream, "\t.byte\t");
|
fprintf (stream, "\t.byte\t");
|
|
|
for (s = l = 0; len > 0; --len, ++ptr)
|
for (s = l = 0; len > 0; --len, ++ptr)
|
{
|
{
|
onlys = 0;
|
onlys = 0;
|
|
|
/* Escape " and \ with a \". */
|
/* Escape " and \ with a \". */
|
special = *ptr == '\"' || *ptr == '\\';
|
special = *ptr == '\"' || *ptr == '\\';
|
|
|
/* If printable - add to buff. */
|
/* If printable - add to buff. */
|
if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
|
if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
|
{
|
{
|
if (special)
|
if (special)
|
sbuf[s++] = '\\';
|
sbuf[s++] = '\\';
|
sbuf[s++] = *ptr;
|
sbuf[s++] = *ptr;
|
if (s < C4X_ASCII_LIMIT - 1)
|
if (s < C4X_ASCII_LIMIT - 1)
|
continue;
|
continue;
|
onlys = 1;
|
onlys = 1;
|
}
|
}
|
if (s)
|
if (s)
|
{
|
{
|
if (first)
|
if (first)
|
first = 0;
|
first = 0;
|
else
|
else
|
{
|
{
|
fputc (',', stream);
|
fputc (',', stream);
|
l++;
|
l++;
|
}
|
}
|
|
|
sbuf[s] = 0;
|
sbuf[s] = 0;
|
fprintf (stream, "\"%s\"", sbuf);
|
fprintf (stream, "\"%s\"", sbuf);
|
l += s + 2;
|
l += s + 2;
|
if (TARGET_TI && l >= 80 && len > 1)
|
if (TARGET_TI && l >= 80 && len > 1)
|
{
|
{
|
fprintf (stream, "\n\t.byte\t");
|
fprintf (stream, "\n\t.byte\t");
|
first = 1;
|
first = 1;
|
l = 0;
|
l = 0;
|
}
|
}
|
|
|
s = 0;
|
s = 0;
|
}
|
}
|
if (onlys)
|
if (onlys)
|
continue;
|
continue;
|
|
|
if (first)
|
if (first)
|
first = 0;
|
first = 0;
|
else
|
else
|
{
|
{
|
fputc (',', stream);
|
fputc (',', stream);
|
l++;
|
l++;
|
}
|
}
|
|
|
fprintf (stream, "%d", *ptr);
|
fprintf (stream, "%d", *ptr);
|
l += 3;
|
l += 3;
|
if (TARGET_TI && l >= 80 && len > 1)
|
if (TARGET_TI && l >= 80 && len > 1)
|
{
|
{
|
fprintf (stream, "\n\t.byte\t");
|
fprintf (stream, "\n\t.byte\t");
|
first = 1;
|
first = 1;
|
l = 0;
|
l = 0;
|
}
|
}
|
}
|
}
|
if (s)
|
if (s)
|
{
|
{
|
if (! first)
|
if (! first)
|
fputc (',', stream);
|
fputc (',', stream);
|
|
|
sbuf[s] = 0;
|
sbuf[s] = 0;
|
fprintf (stream, "\"%s\"", sbuf);
|
fprintf (stream, "\"%s\"", sbuf);
|
s = 0;
|
s = 0;
|
}
|
}
|
fputc ('\n', stream);
|
fputc ('\n', stream);
|
}
|
}
|
|
|
|
|
int
|
int
|
c4x_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
|
c4x_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
|
{
|
{
|
switch (mode)
|
switch (mode)
|
{
|
{
|
#if Pmode != QImode
|
#if Pmode != QImode
|
case Pmode: /* Pointer (24/32 bits). */
|
case Pmode: /* Pointer (24/32 bits). */
|
#endif
|
#endif
|
case QImode: /* Integer (32 bits). */
|
case QImode: /* Integer (32 bits). */
|
return IS_INT_REGNO (regno);
|
return IS_INT_REGNO (regno);
|
|
|
case QFmode: /* Float, Double (32 bits). */
|
case QFmode: /* Float, Double (32 bits). */
|
case HFmode: /* Long Double (40 bits). */
|
case HFmode: /* Long Double (40 bits). */
|
return IS_EXT_REGNO (regno);
|
return IS_EXT_REGNO (regno);
|
|
|
case CCmode: /* Condition Codes. */
|
case CCmode: /* Condition Codes. */
|
case CC_NOOVmode: /* Condition Codes. */
|
case CC_NOOVmode: /* Condition Codes. */
|
return IS_ST_REGNO (regno);
|
return IS_ST_REGNO (regno);
|
|
|
case HImode: /* Long Long (64 bits). */
|
case HImode: /* Long Long (64 bits). */
|
/* We need two registers to store long longs. Note that
|
/* We need two registers to store long longs. Note that
|
it is much easier to constrain the first register
|
it is much easier to constrain the first register
|
to start on an even boundary. */
|
to start on an even boundary. */
|
return IS_INT_REGNO (regno)
|
return IS_INT_REGNO (regno)
|
&& IS_INT_REGNO (regno + 1)
|
&& IS_INT_REGNO (regno + 1)
|
&& (regno & 1) == 0;
|
&& (regno & 1) == 0;
|
|
|
default:
|
default:
|
return 0; /* We don't support these modes. */
|
return 0; /* We don't support these modes. */
|
}
|
}
|
|
|
return 0;
|
return 0;
|
}
|
}
|
|
|
/* Return nonzero if REGNO1 can be renamed to REGNO2.  */

int
c4x_hard_regno_rename_ok (unsigned int regno1, unsigned int regno2)
{
  /* We cannot copy call saved registers from mode QI into QF or from
     mode QF into QI.  */
  if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
    return 0;
  if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
    return 0;
  /* We cannot copy from an extended (40 bit) register to a standard
     (32 bit) register because we only set the condition codes for
     extended registers.  */
  if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
    return 0;
  if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
    return 0;
  return 1;
}
|
|
|
/* The TI C3x C compiler register argument runtime model uses 6 registers,
|
/* The TI C3x C compiler register argument runtime model uses 6 registers,
|
AR2, R2, R3, RC, RS, RE.
|
AR2, R2, R3, RC, RS, RE.
|
|
|
The first two floating point arguments (float, double, long double)
|
The first two floating point arguments (float, double, long double)
|
that are found scanning from left to right are assigned to R2 and R3.
|
that are found scanning from left to right are assigned to R2 and R3.
|
|
|
The remaining integer (char, short, int, long) or pointer arguments
|
The remaining integer (char, short, int, long) or pointer arguments
|
are assigned to the remaining registers in the order AR2, R2, R3,
|
are assigned to the remaining registers in the order AR2, R2, R3,
|
RC, RS, RE when scanning left to right, except for the last named
|
RC, RS, RE when scanning left to right, except for the last named
|
argument prior to an ellipsis denoting variable number of
|
argument prior to an ellipsis denoting variable number of
|
arguments. We don't have to worry about the latter condition since
|
arguments. We don't have to worry about the latter condition since
|
function.c treats the last named argument as anonymous (unnamed).
|
function.c treats the last named argument as anonymous (unnamed).
|
|
|
All arguments that cannot be passed in registers are pushed onto
|
All arguments that cannot be passed in registers are pushed onto
|
the stack in reverse order (right to left). GCC handles that for us.
|
the stack in reverse order (right to left). GCC handles that for us.
|
|
|
c4x_init_cumulative_args() is called at the start, so we can parse
|
c4x_init_cumulative_args() is called at the start, so we can parse
|
the args to see how many floating point arguments and how many
|
the args to see how many floating point arguments and how many
|
integer (or pointer) arguments there are. c4x_function_arg() is
|
integer (or pointer) arguments there are. c4x_function_arg() is
|
then called (sometimes repeatedly) for each argument (parsed left
|
then called (sometimes repeatedly) for each argument (parsed left
|
to right) to obtain the register to pass the argument in, or zero
|
to right) to obtain the register to pass the argument in, or zero
|
if the argument is to be passed on the stack. Once the compiler is
|
if the argument is to be passed on the stack. Once the compiler is
|
happy, c4x_function_arg_advance() is called.
|
happy, c4x_function_arg_advance() is called.
|
|
|
Don't use R0 to pass arguments in, we use 0 to indicate a stack
|
Don't use R0 to pass arguments in, we use 0 to indicate a stack
|
argument. */
|
argument. */
|
|
|
static const int c4x_int_reglist[3][6] =
|
static const int c4x_int_reglist[3][6] =
|
{
|
{
|
{AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
|
{AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
|
{AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
|
{AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
|
{AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
|
{AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
|
};
|
};
|
|
|
static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
|
static const int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};
|
|
|
|
|
/* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
|
/* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
|
function whose data type is FNTYPE.
|
function whose data type is FNTYPE.
|
For a library call, FNTYPE is 0. */
|
For a library call, FNTYPE is 0. */
|
|
|
void
|
void
|
c4x_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname)
|
c4x_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname)
|
{
|
{
|
tree param, next_param;
|
tree param, next_param;
|
|
|
cum->floats = cum->ints = 0;
|
cum->floats = cum->ints = 0;
|
cum->init = 0;
|
cum->init = 0;
|
cum->var = 0;
|
cum->var = 0;
|
cum->args = 0;
|
cum->args = 0;
|
|
|
if (TARGET_DEBUG)
|
if (TARGET_DEBUG)
|
{
|
{
|
fprintf (stderr, "\nc4x_init_cumulative_args (");
|
fprintf (stderr, "\nc4x_init_cumulative_args (");
|
if (fntype)
|
if (fntype)
|
{
|
{
|
tree ret_type = TREE_TYPE (fntype);
|
tree ret_type = TREE_TYPE (fntype);
|
|
|
fprintf (stderr, "fntype code = %s, ret code = %s",
|
fprintf (stderr, "fntype code = %s, ret code = %s",
|
tree_code_name[(int) TREE_CODE (fntype)],
|
tree_code_name[(int) TREE_CODE (fntype)],
|
tree_code_name[(int) TREE_CODE (ret_type)]);
|
tree_code_name[(int) TREE_CODE (ret_type)]);
|
}
|
}
|
else
|
else
|
fprintf (stderr, "no fntype");
|
fprintf (stderr, "no fntype");
|
|
|
if (libname)
|
if (libname)
|
fprintf (stderr, ", libname = %s", XSTR (libname, 0));
|
fprintf (stderr, ", libname = %s", XSTR (libname, 0));
|
}
|
}
|
|
|
cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
|
cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
|
|
|
for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
|
for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
|
param; param = next_param)
|
param; param = next_param)
|
{
|
{
|
tree type;
|
tree type;
|
|
|
next_param = TREE_CHAIN (param);
|
next_param = TREE_CHAIN (param);
|
|
|
type = TREE_VALUE (param);
|
type = TREE_VALUE (param);
|
if (type && type != void_type_node)
|
if (type && type != void_type_node)
|
{
|
{
|
enum machine_mode mode;
|
enum machine_mode mode;
|
|
|
/* If the last arg doesn't have void type then we have
|
/* If the last arg doesn't have void type then we have
|
variable arguments. */
|
variable arguments. */
|
if (! next_param)
|
if (! next_param)
|
cum->var = 1;
|
cum->var = 1;
|
|
|
if ((mode = TYPE_MODE (type)))
|
if ((mode = TYPE_MODE (type)))
|
{
|
{
|
if (! targetm.calls.must_pass_in_stack (mode, type))
|
if (! targetm.calls.must_pass_in_stack (mode, type))
|
{
|
{
|
/* Look for float, double, or long double argument. */
|
/* Look for float, double, or long double argument. */
|
if (mode == QFmode || mode == HFmode)
|
if (mode == QFmode || mode == HFmode)
|
cum->floats++;
|
cum->floats++;
|
/* Look for integer, enumeral, boolean, char, or pointer
|
/* Look for integer, enumeral, boolean, char, or pointer
|
argument. */
|
argument. */
|
else if (mode == QImode || mode == Pmode)
|
else if (mode == QImode || mode == Pmode)
|
cum->ints++;
|
cum->ints++;
|
}
|
}
|
}
|
}
|
cum->args++;
|
cum->args++;
|
}
|
}
|
}
|
}
|
|
|
if (TARGET_DEBUG)
|
if (TARGET_DEBUG)
|
fprintf (stderr, "%s%s, args = %d)\n",
|
fprintf (stderr, "%s%s, args = %d)\n",
|
cum->prototype ? ", prototype" : "",
|
cum->prototype ? ", prototype" : "",
|
cum->var ? ", variable args" : "",
|
cum->var ? ", variable args" : "",
|
cum->args);
|
cum->args);
|
}
|
}
|
|
|
|
|
/* Update the data in CUM to advance over an argument
|
/* Update the data in CUM to advance over an argument
|
of mode MODE and data type TYPE.
|
of mode MODE and data type TYPE.
|
(TYPE is null for libcalls where that information may not be available.) */
|
(TYPE is null for libcalls where that information may not be available.) */
|
|
|
void
|
void
|
c4x_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
|
c4x_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
|
tree type, int named)
|
tree type, int named)
|
{
|
{
|
if (TARGET_DEBUG)
|
if (TARGET_DEBUG)
|
fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
|
fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
|
GET_MODE_NAME (mode), named);
|
GET_MODE_NAME (mode), named);
|
if (! TARGET_MEMPARM
|
if (! TARGET_MEMPARM
|
&& named
|
&& named
|
&& type
|
&& type
|
&& ! targetm.calls.must_pass_in_stack (mode, type))
|
&& ! targetm.calls.must_pass_in_stack (mode, type))
|
{
|
{
|
/* Look for float, double, or long double argument. */
|
/* Look for float, double, or long double argument. */
|
if (mode == QFmode || mode == HFmode)
|
if (mode == QFmode || mode == HFmode)
|
cum->floats++;
|
cum->floats++;
|
/* Look for integer, enumeral, boolean, char, or pointer argument. */
|
/* Look for integer, enumeral, boolean, char, or pointer argument. */
|
else if (mode == QImode || mode == Pmode)
|
else if (mode == QImode || mode == Pmode)
|
cum->ints++;
|
cum->ints++;
|
}
|
}
|
else if (! TARGET_MEMPARM && ! type)
|
else if (! TARGET_MEMPARM && ! type)
|
{
|
{
|
/* Handle libcall arguments. */
|
/* Handle libcall arguments. */
|
if (mode == QFmode || mode == HFmode)
|
if (mode == QFmode || mode == HFmode)
|
cum->floats++;
|
cum->floats++;
|
else if (mode == QImode || mode == Pmode)
|
else if (mode == QImode || mode == Pmode)
|
cum->ints++;
|
cum->ints++;
|
}
|
}
|
return;
|
return;
|
}
|
}
|
|
|
|
|
/* Define where to put the arguments to a function. Value is zero to
|
/* Define where to put the arguments to a function. Value is zero to
|
push the argument on the stack, or a hard register in which to
|
push the argument on the stack, or a hard register in which to
|
store the argument.
|
store the argument.
|
|
|
MODE is the argument's machine mode.
|
MODE is the argument's machine mode.
|
TYPE is the data type of the argument (as a tree).
|
TYPE is the data type of the argument (as a tree).
|
This is null for libcalls where that information may
|
This is null for libcalls where that information may
|
not be available.
|
not be available.
|
CUM is a variable of type CUMULATIVE_ARGS which gives info about
|
CUM is a variable of type CUMULATIVE_ARGS which gives info about
|
the preceding args and about the function being called.
|
the preceding args and about the function being called.
|
NAMED is nonzero if this argument is a named parameter
|
NAMED is nonzero if this argument is a named parameter
|
(otherwise it is an extra parameter matching an ellipsis). */
|
(otherwise it is an extra parameter matching an ellipsis). */
|
|
|
struct rtx_def *
|
struct rtx_def *
|
c4x_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
|
c4x_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
|
tree type, int named)
|
tree type, int named)
|
{
|
{
|
int reg = 0; /* Default to passing argument on stack. */
|
int reg = 0; /* Default to passing argument on stack. */
|
|
|
if (! cum->init)
|
if (! cum->init)
|
{
|
{
|
/* We can handle at most 2 floats in R2, R3. */
|
/* We can handle at most 2 floats in R2, R3. */
|
cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;
|
cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;
|
|
|
/* We can handle at most 6 integers minus number of floats passed
|
/* We can handle at most 6 integers minus number of floats passed
|
in registers. */
|
in registers. */
|
cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
|
cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
|
6 - cum->maxfloats : cum->ints;
|
6 - cum->maxfloats : cum->ints;
|
|
|
/* If there is no prototype, assume all the arguments are integers. */
|
/* If there is no prototype, assume all the arguments are integers. */
|
if (! cum->prototype)
|
if (! cum->prototype)
|
cum->maxints = 6;
|
cum->maxints = 6;
|
|
|
cum->ints = cum->floats = 0;
|
cum->ints = cum->floats = 0;
|
cum->init = 1;
|
cum->init = 1;
|
}
|
}
|
|
|
/* This marks the last argument. We don't need to pass this through
|
/* This marks the last argument. We don't need to pass this through
|
to the call insn. */
|
to the call insn. */
|
if (type == void_type_node)
|
if (type == void_type_node)
|
return 0;
|
return 0;
|
|
|
if (! TARGET_MEMPARM
|
if (! TARGET_MEMPARM
|
&& named
|
&& named
|
&& type
|
&& type
|
&& ! targetm.calls.must_pass_in_stack (mode, type))
|
&& ! targetm.calls.must_pass_in_stack (mode, type))
|
{
|
{
|
/* Look for float, double, or long double argument. */
|
/* Look for float, double, or long double argument. */
|
if (mode == QFmode || mode == HFmode)
|
if (mode == QFmode || mode == HFmode)
|
{
|
{
|
if (cum->floats < cum->maxfloats)
|
if (cum->floats < cum->maxfloats)
|
reg = c4x_fp_reglist[cum->floats];
|
reg = c4x_fp_reglist[cum->floats];
|
}
|
}
|
/* Look for integer, enumeral, boolean, char, or pointer argument. */
|
/* Look for integer, enumeral, boolean, char, or pointer argument. */
|
else if (mode == QImode || mode == Pmode)
|
else if (mode == QImode || mode == Pmode)
|
{
|
{
|
if (cum->ints < cum->maxints)
|
if (cum->ints < cum->maxints)
|
reg = c4x_int_reglist[cum->maxfloats][cum->ints];
|
reg = c4x_int_reglist[cum->maxfloats][cum->ints];
|
}
|
}
|
}
|
}
|
else if (! TARGET_MEMPARM && ! type)
|
else if (! TARGET_MEMPARM && ! type)
|
{
|
{
|
/* We could use a different argument calling model for libcalls,
|
/* We could use a different argument calling model for libcalls,
|
since we're only calling functions in libgcc. Thus we could
|
since we're only calling functions in libgcc. Thus we could
|
pass arguments for long longs in registers rather than on the
|
pass arguments for long longs in registers rather than on the
|
stack. In the meantime, use the odd TI format. We make the
|
stack. In the meantime, use the odd TI format. We make the
|
assumption that we won't have more than two floating point
|
assumption that we won't have more than two floating point
|
args, six integer args, and that all the arguments are of the
|
args, six integer args, and that all the arguments are of the
|
same mode. */
|
same mode. */
|
if (mode == QFmode || mode == HFmode)
|
if (mode == QFmode || mode == HFmode)
|
reg = c4x_fp_reglist[cum->floats];
|
reg = c4x_fp_reglist[cum->floats];
|
else if (mode == QImode || mode == Pmode)
|
else if (mode == QImode || mode == Pmode)
|
reg = c4x_int_reglist[0][cum->ints];
|
reg = c4x_int_reglist[0][cum->ints];
|
}
|
}
|
|
|
if (TARGET_DEBUG)
|
if (TARGET_DEBUG)
|
{
|
{
|
fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
|
fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
|
GET_MODE_NAME (mode), named);
|
GET_MODE_NAME (mode), named);
|
if (reg)
|
if (reg)
|
fprintf (stderr, ", reg=%s", reg_names[reg]);
|
fprintf (stderr, ", reg=%s", reg_names[reg]);
|
else
|
else
|
fprintf (stderr, ", stack");
|
fprintf (stderr, ", stack");
|
fprintf (stderr, ")\n");
|
fprintf (stderr, ")\n");
|
}
|
}
|
if (reg)
|
if (reg)
|
return gen_rtx_REG (mode, reg);
|
return gen_rtx_REG (mode, reg);
|
else
|
else
|
return NULL_RTX;
|
return NULL_RTX;
|
}
|
}
|
|
|
/* C[34]x arguments grow in weird ways (downwards) that the standard
   varargs stuff can't handle..  */

/* Gimplify a VA_ARG expression for this target: pre-decrement the
   va_list pointer by the argument size, then dereference it.
   Arguments passed by reference get an extra dereference.  */

static tree
c4x_gimplify_va_arg_expr (tree valist, tree type,
			  tree *pre_p ATTRIBUTE_UNUSED,
			  tree *post_p ATTRIBUTE_UNUSED)
{
  tree t;
  bool indirect;

  /* Ask the generic pass-by-reference machinery whether this type is
     actually passed as a pointer; if so we fetch the pointer first.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  /* Because arguments grow downwards, decrement the va_list pointer
     by the size of the (possibly pointer) argument before reading.  */
  t = build2 (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
	      build_int_cst (NULL_TREE, int_size_in_bytes (type)));
  t = fold_convert (build_pointer_type (type), t);
  t = build_va_arg_indirect_ref (t);

  /* For pass-by-reference arguments, the fetched value is itself a
     pointer to the real argument — dereference once more.  */
  if (indirect)
    t = build_va_arg_indirect_ref (t);

  return t;
}
|
|
|
|
|
static int
|
static int
|
c4x_isr_reg_used_p (unsigned int regno)
|
c4x_isr_reg_used_p (unsigned int regno)
|
{
|
{
|
/* Don't save/restore FP or ST, we handle them separately. */
|
/* Don't save/restore FP or ST, we handle them separately. */
|
if (regno == FRAME_POINTER_REGNUM
|
if (regno == FRAME_POINTER_REGNUM
|
|| IS_ST_REGNO (regno))
|
|| IS_ST_REGNO (regno))
|
return 0;
|
return 0;
|
|
|
/* We could be a little smarter abut saving/restoring DP.
|
/* We could be a little smarter abut saving/restoring DP.
|
We'll only save if for the big memory model or if
|
We'll only save if for the big memory model or if
|
we're paranoid. ;-) */
|
we're paranoid. ;-) */
|
if (IS_DP_REGNO (regno))
|
if (IS_DP_REGNO (regno))
|
return ! TARGET_SMALL || TARGET_PARANOID;
|
return ! TARGET_SMALL || TARGET_PARANOID;
|
|
|
/* Only save/restore regs in leaf function that are used. */
|
/* Only save/restore regs in leaf function that are used. */
|
if (c4x_leaf_function)
|
if (c4x_leaf_function)
|
return regs_ever_live[regno] && fixed_regs[regno] == 0;
|
return regs_ever_live[regno] && fixed_regs[regno] == 0;
|
|
|
/* Only save/restore regs that are used by the ISR and regs
|
/* Only save/restore regs that are used by the ISR and regs
|
that are likely to be used by functions the ISR calls
|
that are likely to be used by functions the ISR calls
|
if they are not fixed. */
|
if they are not fixed. */
|
return IS_EXT_REGNO (regno)
|
return IS_EXT_REGNO (regno)
|
|| ((regs_ever_live[regno] || call_used_regs[regno])
|
|| ((regs_ever_live[regno] || call_used_regs[regno])
|
&& fixed_regs[regno] == 0);
|
&& fixed_regs[regno] == 0);
|
}
|
}
|
|
|
|
|
static int
|
static int
|
c4x_leaf_function_p (void)
|
c4x_leaf_function_p (void)
|
{
|
{
|
/* A leaf function makes no calls, so we only need
|
/* A leaf function makes no calls, so we only need
|
to save/restore the registers we actually use.
|
to save/restore the registers we actually use.
|
For the global variable leaf_function to be set, we need
|
For the global variable leaf_function to be set, we need
|
to define LEAF_REGISTERS and all that it entails.
|
to define LEAF_REGISTERS and all that it entails.
|
Let's check ourselves.... */
|
Let's check ourselves.... */
|
|
|
if (lookup_attribute ("leaf_pretend",
|
if (lookup_attribute ("leaf_pretend",
|
TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
|
TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
|
return 1;
|
return 1;
|
|
|
/* Use the leaf_pretend attribute at your own risk. This is a hack
|
/* Use the leaf_pretend attribute at your own risk. This is a hack
|
to speed up ISRs that call a function infrequently where the
|
to speed up ISRs that call a function infrequently where the
|
overhead of saving and restoring the additional registers is not
|
overhead of saving and restoring the additional registers is not
|
warranted. You must save and restore the additional registers
|
warranted. You must save and restore the additional registers
|
required by the called function. Caveat emptor. Here's enough
|
required by the called function. Caveat emptor. Here's enough
|
rope... */
|
rope... */
|
|
|
if (leaf_function_p ())
|
if (leaf_function_p ())
|
return 1;
|
return 1;
|
|
|
return 0;
|
return 0;
|
}
|
}
|
|
|
|
|
static int
|
static int
|
c4x_naked_function_p (void)
|
c4x_naked_function_p (void)
|
{
|
{
|
tree type;
|
tree type;
|
|
|
type = TREE_TYPE (current_function_decl);
|
type = TREE_TYPE (current_function_decl);
|
return lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL;
|
return lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL;
|
}
|
}
|
|
|
|
|
int
|
int
|
c4x_interrupt_function_p (void)
|
c4x_interrupt_function_p (void)
|
{
|
{
|
const char *cfun_name;
|
const char *cfun_name;
|
if (lookup_attribute ("interrupt",
|
if (lookup_attribute ("interrupt",
|
TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
|
TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
|
return 1;
|
return 1;
|
|
|
/* Look for TI style c_intnn. */
|
/* Look for TI style c_intnn. */
|
cfun_name = current_function_name ();
|
cfun_name = current_function_name ();
|
return cfun_name[0] == 'c'
|
return cfun_name[0] == 'c'
|
&& cfun_name[1] == '_'
|
&& cfun_name[1] == '_'
|
&& cfun_name[2] == 'i'
|
&& cfun_name[2] == 'i'
|
&& cfun_name[3] == 'n'
|
&& cfun_name[3] == 'n'
|
&& cfun_name[4] == 't'
|
&& cfun_name[4] == 't'
|
&& ISDIGIT (cfun_name[5])
|
&& ISDIGIT (cfun_name[5])
|
&& ISDIGIT (cfun_name[6]);
|
&& ISDIGIT (cfun_name[6]);
|
}
|
}
|
|
|
/* Emit the RTL for the current function's prologue.  Three cases:
   naked functions get nothing; interrupt functions save ST, every
   register c4x_isr_reg_used_p reports, and allocate the frame;
   ordinary functions set up AR3 as frame pointer (when needed),
   allocate the frame, and push the call-saved registers in use.
   Every emitted insn is marked RTX_FRAME_RELATED_P for unwind info.  */

void
c4x_expand_prologue (void)
{
  unsigned int regno;
  int size = get_frame_size ();
  rtx insn;

  /* In functions where ar3 is not used but frame pointers are still
     specified, frame pointers are not adjusted (if >= -O2) and this
     is used so it won't needlessly push the frame pointer.  */
  int dont_push_ar3;

  /* For __naked__ function don't build a prologue.  */
  if (c4x_naked_function_p ())
    {
      return;
    }

  /* For __interrupt__ function build specific prologue.  */
  if (c4x_interrupt_function_p ())
    {
      c4x_leaf_function = c4x_leaf_function_p ();

      /* ISRs must preserve the status register.  */
      insn = emit_insn (gen_push_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      if (size)
	{
	  /* Save AR3 and make it point at the incoming SP so the
	     frame can be addressed while SP moves.  */
	  insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
				       gen_rtx_REG (QImode, SP_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  /* We require that an ISR uses fewer than 32768 words of
	     local variables, otherwise we have to go to lots of
	     effort to save a register, load it with the desired size,
	     adjust the stack pointer, and then restore the modified
	     register.  Frankly, I think it is a poor ISR that
	     requires more than 32767 words of local temporary
	     storage!  */
	  if (size > 32767)
	    error ("ISR %s requires %d words of local vars, max is 32767",
		   current_function_name (), size);

	  /* Allocate the local frame by advancing SP.  */
	  insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT (size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      /* Save every register the ISR (or its callees) may clobber.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  if (c4x_isr_reg_used_p (regno))
	    {
	      if (regno == DP_REGNO)
		{
		  insn = emit_insn (gen_push_dp ());
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	      else
		{
		  insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		  /* Extended-precision registers also need their
		     float part saved separately.  */
		  if (IS_EXT_REGNO (regno))
		    {
		      insn = emit_insn (gen_pushqf
					(gen_rtx_REG (QFmode, regno)));
		      RTX_FRAME_RELATED_P (insn) = 1;
		    }
		}
	    }
	}
      /* We need to clear the repeat mode flag if the ISR is
	 going to use a RPTB instruction or uses the RC, RS, or RE
	 registers.  */
      if (regs_ever_live[RC_REGNO]
	  || regs_ever_live[RS_REGNO]
	  || regs_ever_live[RE_REGNO])
	{
	  insn = emit_insn (gen_andn_st (GEN_INT(~0x100)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      /* Reload DP reg if we are paranoid about some turkey
	 violating small memory model rules.  */
      if (TARGET_SMALL && TARGET_PARANOID)
	{
	  insn = emit_insn (gen_set_ldp_prologue
			    (gen_rtx_REG (QImode, DP_REGNO),
			     gen_rtx_SYMBOL_REF (QImode, "data_sec")));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }
  else
    {
      if (frame_pointer_needed)
	{
	  if ((size != 0)
	      || (current_function_args_size != 0)
	      || (optimize < 2))
	    {
	      /* Push the old frame pointer and establish AR3 as the
		 new one, pointing at the incoming SP.  */
	      insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, AR3_REGNO)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
					   gen_rtx_REG (QImode, SP_REGNO)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      dont_push_ar3 = 1;
	    }
	  else
	    {
	      /* Since ar3 is not used, we don't need to push it.  */
	      dont_push_ar3 = 1;
	    }
	}
      else
	{
	  /* If we use ar3, we need to push it.  */
	  dont_push_ar3 = 0;
	  if ((size != 0) || (current_function_args_size != 0))
	    {
	      /* If we are omitting the frame pointer, we still have
		 to make space for it so the offsets are correct
		 unless we don't use anything on the stack at all.  */
	      size += 1;
	    }
	}

      if (size > 32767)
	{
	  /* Local vars are too big, it will take multiple operations
	     to increment SP.  Build the full 32-bit size in R1, then
	     add it to SP.  */
	  if (TARGET_C3X)
	    {
	      /* The C3x cannot load a 32-bit immediate in one insn:
		 load the high half then shift it into position.  */
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
					   GEN_INT(size >> 16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
					     gen_rtx_REG (QImode, R1_REGNO),
					     GEN_INT(-16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  else
	    {
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
					   GEN_INT(size & ~0xffff)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  /* OR in the low 16 bits and bump SP by the result.  */
	  insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
					gen_rtx_REG (QImode, R1_REGNO),
					GEN_INT(size & 0xffff)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, R1_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else if (size != 0)
	{
	  /* Local vars take up less than 32767 words, so we can directly
	     add the number.  */
	  insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT (size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      /* Save the call-saved registers that this function uses.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  if (regs_ever_live[regno] && ! call_used_regs[regno])
	    {
	      if (IS_FLOAT_CALL_SAVED_REGNO (regno))
		{
		  /* With -mpreserve-float, also save the integer view
		     of the extended-precision register.  */
		  if (TARGET_PRESERVE_FLOAT)
		    {
		      insn = emit_insn (gen_pushqi
					(gen_rtx_REG (QImode, regno)));
		      RTX_FRAME_RELATED_P (insn) = 1;
		    }
		  insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	      else if ((! dont_push_ar3) || (regno != AR3_REGNO))
		{
		  insn = emit_insn (gen_pushqi ( gen_rtx_REG (QImode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	    }
	}
    }
}
|
|
|
|
|
/* Emit the RTL for the current function's epilogue: the mirror of
   c4x_expand_prologue.  Restores registers in the reverse order they
   were pushed, deallocates the frame, and emits the return jump.  */

void
c4x_expand_epilogue(void)
{
  int regno;
  int jump = 0;
  int dont_pop_ar3;
  rtx insn;
  int size = get_frame_size ();

  /* For __naked__ function build no epilogue.  */
  if (c4x_naked_function_p ())
    {
      insn = emit_jump_insn (gen_return_from_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
      return;
    }

  /* For __interrupt__ function build specific epilogue.  */
  if (c4x_interrupt_function_p ())
    {
      /* Pop the saved registers in reverse order of the prologue.  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
	{
	  if (! c4x_isr_reg_used_p (regno))
	    continue;
	  if (regno == DP_REGNO)
	    {
	      insn = emit_insn (gen_pop_dp ());
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  else
	    {
	      /* We have to use unspec because the compiler will delete insns
		 that are not call-saved.  */
	      if (IS_EXT_REGNO (regno))
		{
		  insn = emit_insn (gen_popqf_unspec
				    (gen_rtx_REG (QFmode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	      insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	}
      if (size)
	{
	  /* Deallocate the frame and restore the saved AR3.  */
	  insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT(size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_popqi
			    (gen_rtx_REG (QImode, AR3_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      /* Restore the status register last, then return from interrupt.  */
      insn = emit_insn (gen_pop_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      if (frame_pointer_needed)
	{
	  if ((size != 0)
	      || (current_function_args_size != 0)
	      || (optimize < 2))
	    {
	      /* Fetch the return address from *(AR3 - 1) into R2 so
		 we can return through it after unwinding the frame.  */
	      insn = emit_insn
		(gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
			    gen_rtx_MEM (QImode,
					 gen_rtx_PLUS
					 (QImode, gen_rtx_REG (QImode,
							       AR3_REGNO),
					  constm1_rtx))));
	      RTX_FRAME_RELATED_P (insn) = 1;

	      /* We already have the return value and the fp,
		 so we need to add those to the stack.  */
	      size += 2;
	      jump = 1;
	      dont_pop_ar3 = 1;
	    }
	  else
	    {
	      /* Since ar3 is not used for anything, we don't need to
		 pop it.  */
	      dont_pop_ar3 = 1;
	    }
	}
      else
	{
	  dont_pop_ar3 = 0;	/* If we use ar3, we need to pop it.  */
	  if (size || current_function_args_size)
	    {
	      /* If we are omitting the frame pointer, we still have
		 to make space for it so the offsets are correct
		 unless we don't use anything on the stack at all.  */
	      size += 1;
	    }
	}

      /* Now restore the saved registers, putting in the delayed branch
	 where required.  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
	{
	  if (regs_ever_live[regno] && ! call_used_regs[regno])
	    {
	      if (regno == AR3_REGNO && dont_pop_ar3)
		continue;

	      if (IS_FLOAT_CALL_SAVED_REGNO (regno))
		{
		  /* Pop the float part first — reverse of the push
		     order used in the prologue.  */
		  insn = emit_insn (gen_popqf_unspec
				    (gen_rtx_REG (QFmode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		  if (TARGET_PRESERVE_FLOAT)
		    {
		      insn = emit_insn (gen_popqi_unspec
					(gen_rtx_REG (QImode, regno)));
		      RTX_FRAME_RELATED_P (insn) = 1;
		    }
		}
	      else
		{
		  insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
		  RTX_FRAME_RELATED_P (insn) = 1;
		}
	    }
	}

      if (frame_pointer_needed)
	{
	  if ((size != 0)
	      || (current_function_args_size != 0)
	      || (optimize < 2))
	    {
	      /* Restore the old FP.  */
	      insn = emit_insn
		(gen_movqi
		 (gen_rtx_REG (QImode, AR3_REGNO),
		  gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));

	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	}

      if (size > 32767)
	{
	  /* Local vars are too big, it will take multiple operations
	     to decrement SP.  Build the 32-bit size in R3, mirroring
	     the R1 sequence in the prologue.  */
	  if (TARGET_C3X)
	    {
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
					   GEN_INT(size >> 16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
					     gen_rtx_REG (QImode, R3_REGNO),
					     GEN_INT(-16)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  else
	    {
	      insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
					   GEN_INT(size & ~0xffff)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	  insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
					gen_rtx_REG (QImode, R3_REGNO),
					GEN_INT(size & 0xffff)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, R3_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else if (size != 0)
	{
	  /* Local vars take up less than 32768 words, so we can directly
	     subtract the number.  */
	  insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
					gen_rtx_REG (QImode, SP_REGNO),
					GEN_INT(size)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      if (jump)
	{
	  /* Return through the address already loaded into R2.  */
	  insn = emit_jump_insn (gen_return_indirect_internal
				 (gen_rtx_REG (QImode, R2_REGNO)));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      else
	{
	  insn = emit_jump_insn (gen_return_from_epilogue ());
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }
}
|
|
|
|
|
int
|
int
|
c4x_null_epilogue_p (void)
|
c4x_null_epilogue_p (void)
|
{
|
{
|
int regno;
|
int regno;
|
|
|
if (reload_completed
|
if (reload_completed
|
&& ! c4x_naked_function_p ()
|
&& ! c4x_naked_function_p ()
|
&& ! c4x_interrupt_function_p ()
|
&& ! c4x_interrupt_function_p ()
|
&& ! current_function_calls_alloca
|
&& ! current_function_calls_alloca
|
&& ! current_function_args_size
|
&& ! current_function_args_size
|
&& ! (optimize < 2)
|
&& ! (optimize < 2)
|
&& ! get_frame_size ())
|
&& ! get_frame_size ())
|
{
|
{
|
for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
|
for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
|
if (regs_ever_live[regno] && ! call_used_regs[regno]
|
if (regs_ever_live[regno] && ! call_used_regs[regno]
|
&& (regno != AR3_REGNO))
|
&& (regno != AR3_REGNO))
|
return 1;
|
return 1;
|
return 0;
|
return 0;
|
}
|
}
|
return 1;
|
return 1;
|
}
|
}
|
|
|
|
|
int
|
int
|
c4x_emit_move_sequence (rtx *operands, enum machine_mode mode)
|
c4x_emit_move_sequence (rtx *operands, enum machine_mode mode)
|
{
|
{
|
rtx op0 = operands[0];
|
rtx op0 = operands[0];
|
rtx op1 = operands[1];
|
rtx op1 = operands[1];
|
|
|
if (! reload_in_progress
|
if (! reload_in_progress
|
&& ! REG_P (op0)
|
&& ! REG_P (op0)
|
&& ! REG_P (op1)
|
&& ! REG_P (op1)
|
&& ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
|
&& ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
|
op1 = force_reg (mode, op1);
|
op1 = force_reg (mode, op1);
|
|
|
if (GET_CODE (op1) == LO_SUM
|
if (GET_CODE (op1) == LO_SUM
|
&& GET_MODE (op1) == Pmode
|
&& GET_MODE (op1) == Pmode
|
&& dp_reg_operand (XEXP (op1, 0), mode))
|
&& dp_reg_operand (XEXP (op1, 0), mode))
|
{
|
{
|
/* expand_increment will sometimes create a LO_SUM immediate
|
/* expand_increment will sometimes create a LO_SUM immediate
|
address. Undo this silliness. */
|
address. Undo this silliness. */
|
op1 = XEXP (op1, 1);
|
op1 = XEXP (op1, 1);
|
}
|
}
|
|
|
if (symbolic_address_operand (op1, mode))
|
if (symbolic_address_operand (op1, mode))
|
{
|
{
|
if (TARGET_LOAD_ADDRESS)
|
if (TARGET_LOAD_ADDRESS)
|
{
|
{
|
/* Alias analysis seems to do a better job if we force
|
/* Alias analysis seems to do a better job if we force
|
constant addresses to memory after reload. */
|
constant addresses to memory after reload. */
|
emit_insn (gen_load_immed_address (op0, op1));
|
emit_insn (gen_load_immed_address (op0, op1));
|
return 1;
|
return 1;
|
}
|
}
|
else
|
else
|
{
|
{
|
/* Stick symbol or label address into the constant pool. */
|
/* Stick symbol or label address into the constant pool. */
|
op1 = force_const_mem (Pmode, op1);
|
op1 = force_const_mem (Pmode, op1);
|
}
|
}
|
}
|
}
|
else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
|
else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
|
{
|
{
|
/* We could be a lot smarter about loading some of these
|
/* We could be a lot smarter about loading some of these
|
constants... */
|
constants... */
|
op1 = force_const_mem (mode, op1);
|
op1 = force_const_mem (mode, op1);
|
}
|
}
|
|
|
/* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
|
/* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
|
and emit associated (HIGH (SYMREF)) if large memory model.
|
and emit associated (HIGH (SYMREF)) if large memory model.
|
c4x_legitimize_address could be used to do this,
|
c4x_legitimize_address could be used to do this,
|
perhaps by calling validize_address. */
|
perhaps by calling validize_address. */
|
if (TARGET_EXPOSE_LDP
|
if (TARGET_EXPOSE_LDP
|
&& ! (reload_in_progress || reload_completed)
|
&& ! (reload_in_progress || reload_completed)
|
&& GET_CODE (op1) == MEM
|
&& GET_CODE (op1) == MEM
|
&& symbolic_address_operand (XEXP (op1, 0), Pmode))
|
&& symbolic_address_operand (XEXP (op1, 0), Pmode))
|
{
|
{
|
rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
|
rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
|
if (! TARGET_SMALL)
|
if (! TARGET_SMALL)
|
emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
|
emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
|
op1 = change_address (op1, mode,
|
op1 = change_address (op1, mode,
|
gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
|
gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
|
}
|
}
|
|
|
if (TARGET_EXPOSE_LDP
|
if (TARGET_EXPOSE_LDP
|
&& ! (reload_in_progress || reload_completed)
|
&& ! (reload_in_progress || reload_completed)
|
&& GET_CODE (op0) == MEM
|
&& GET_CODE (op0) == MEM
|
&& symbolic_address_operand (XEXP (op0, 0), Pmode))
|
&& symbolic_address_operand (XEXP (op0, 0), Pmode))
|
{
|
{
|
rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
|
rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
|
if (! TARGET_SMALL)
|
if (! TARGET_SMALL)
|
emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
|
emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
|
op0 = change_address (op0, mode,
|
op0 = change_address (op0, mode,
|
gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
|
gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
|
}
|
}
|
|
|
if (GET_CODE (op0) == SUBREG
|
if (GET_CODE (op0) == SUBREG
|
&& mixed_subreg_operand (op0, mode))
|
&& mixed_subreg_operand (op0, mode))
|
{
|
{
|
/* We should only generate these mixed mode patterns
|
/* We should only generate these mixed mode patterns
|
during RTL generation. If we need do it later on
|
during RTL generation. If we need do it later on
|
then we'll have to emit patterns that won't clobber CC. */
|
then we'll have to emit patterns that won't clobber CC. */
|
if (reload_in_progress || reload_completed)
|
if (reload_in_progress || reload_completed)
|
abort ();
|
abort ();
|
if (GET_MODE (SUBREG_REG (op0)) == QImode)
|
if (GET_MODE (SUBREG_REG (op0)) == QImode)
|
op0 = SUBREG_REG (op0);
|
op0 = SUBREG_REG (op0);
|
else if (GET_MODE (SUBREG_REG (op0)) == HImode)
|
else if (GET_MODE (SUBREG_REG (op0)) == HImode)
|
{
|
{
|
op0 = copy_rtx (op0);
|
op0 = copy_rtx (op0);
|
PUT_MODE (op0, QImode);
|
PUT_MODE (op0, QImode);
|
}
|
}
|
else
|
else
|
abort ();
|
abort ();
|
|
|
if (mode == QFmode)
|
if (mode == QFmode)
|
emit_insn (gen_storeqf_int_clobber (op0, op1));
|
emit_insn (gen_storeqf_int_clobber (op0, op1));
|
else
|
else
|
abort ();
|
abort ();
|
return 1;
|
return 1;
|
}
|
}
|
|
|
if (GET_CODE (op1) == SUBREG
|
if (GET_CODE (op1) == SUBREG
|
&& mixed_subreg_operand (op1, mode))
|
&& mixed_subreg_operand (op1, mode))
|
{
|
{
|
/* We should only generate these mixed mode patterns
|
/* We should only generate these mixed mode patterns
|
during RTL generation. If we need do it later on
|
during RTL generation. If we need do it later on
|
then we'll have to emit patterns that won't clobber CC. */
|
then we'll have to emit patterns that won't clobber CC. */
|
if (reload_in_progress || reload_completed)
|
if (reload_in_progress || reload_completed)
|
abort ();
|
abort ();
|
if (GET_MODE (SUBREG_REG (op1)) == QImode)
|
if (GET_MODE (SUBREG_REG (op1)) == QImode)
|
op1 = SUBREG_REG (op1);
|
op1 = SUBREG_REG (op1);
|
else if (GET_MODE (SUBREG_REG (op1)) == HImode)
|
else if (GET_MODE (SUBREG_REG (op1)) == HImode)
|
{
|
{
|
op1 = copy_rtx (op1);
|
op1 = copy_rtx (op1);
|
PUT_MODE (op1, QImode);
|
PUT_MODE (op1, QImode);
|
}
|
}
|
else
|
else
|
abort ();
|
abort ();
|
|
|
if (mode == QFmode)
|
if (mode == QFmode)
|
emit_insn (gen_loadqf_int_clobber (op0, op1));
|
emit_insn (gen_loadqf_int_clobber (op0, op1));
|
else
|
else
|
abort ();
|
abort ();
|
return 1;
|
return 1;
|
}
|
}
|
|
|
if (mode == QImode
|
if (mode == QImode
|
&& reg_operand (op0, mode)
|
&& reg_operand (op0, mode)
|
&& const_int_operand (op1, mode)
|
&& const_int_operand (op1, mode)
|
&& ! IS_INT16_CONST (INTVAL (op1))
|
&& ! IS_INT16_CONST (INTVAL (op1))
|
&& ! IS_HIGH_CONST (INTVAL (op1)))
|
&& ! IS_HIGH_CONST (INTVAL (op1)))
|
{
|
{
|
emit_insn (gen_loadqi_big_constant (op0, op1));
|
emit_insn (gen_loadqi_big_constant (op0, op1));
|
return 1;
|
return 1;
|
}
|
}
|
|
|
if (mode == HImode
|
if (mode == HImode
|
&& reg_operand (op0, mode)
|
&& reg_operand (op0, mode)
|
&& const_int_operand (op1, mode))
|
&& const_int_operand (op1, mode))
|
{
|
{
|
emit_insn (gen_loadhi_big_constant (op0, op1));
|
emit_insn (gen_loadhi_big_constant (op0, op1));
|
return 1;
|
return 1;
|
}
|
}
|
|
|
/* Adjust operands in case we have modified them. */
|
/* Adjust operands in case we have modified them. */
|
operands[0] = op0;
|
operands[0] = op0;
|
operands[1] = op1;
|
operands[1] = op1;
|
|
|
/* Emit normal pattern. */
|
/* Emit normal pattern. */
|
return 0;
|
return 0;
|
}
|
}
|
|
|
|
|
void
|
void
|
c4x_emit_libcall (rtx libcall, enum rtx_code code,
|
c4x_emit_libcall (rtx libcall, enum rtx_code code,
|
enum machine_mode dmode, enum machine_mode smode,
|
enum machine_mode dmode, enum machine_mode smode,
|
int noperands, rtx *operands)
|
int noperands, rtx *operands)
|
{
|
{
|
rtx ret;
|
rtx ret;
|
rtx insns;
|
rtx insns;
|
rtx equiv;
|
rtx equiv;
|
|
|
start_sequence ();
|
start_sequence ();
|
switch (noperands)
|
switch (noperands)
|
{
|
{
|
case 2:
|
case 2:
|
ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
|
ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
|
operands[1], smode);
|
operands[1], smode);
|
equiv = gen_rtx_fmt_e (code, dmode, operands[1]);
|
equiv = gen_rtx_fmt_e (code, dmode, operands[1]);
|
break;
|
break;
|
|
|
case 3:
|
case 3:
|
ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
|
ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
|
operands[1], smode, operands[2], smode);
|
operands[1], smode, operands[2], smode);
|
equiv = gen_rtx_fmt_ee (code, dmode, operands[1], operands[2]);
|
equiv = gen_rtx_fmt_ee (code, dmode, operands[1], operands[2]);
|
break;
|
break;
|
|
|
default:
|
default:
|
abort ();
|
abort ();
|
}
|
}
|
|
|
insns = get_insns ();
|
insns = get_insns ();
|
end_sequence ();
|
end_sequence ();
|
emit_libcall_block (insns, operands[0], ret, equiv);
|
emit_libcall_block (insns, operands[0], ret, equiv);
|
}
|
}
|
|
|
|
|
void
|
void
|
c4x_emit_libcall3 (rtx libcall, enum rtx_code code,
|
c4x_emit_libcall3 (rtx libcall, enum rtx_code code,
|
enum machine_mode mode, rtx *operands)
|
enum machine_mode mode, rtx *operands)
|
{
|
{
|
c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
|
c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
|
}
|
}
|
|
|
|
|
void
|
void
|
c4x_emit_libcall_mulhi (rtx libcall, enum rtx_code code,
|
c4x_emit_libcall_mulhi (rtx libcall, enum rtx_code code,
|
enum machine_mode mode, rtx *operands)
|
enum machine_mode mode, rtx *operands)
|
{
|
{
|
rtx ret;
|
rtx ret;
|
rtx insns;
|
rtx insns;
|
rtx equiv;
|
rtx equiv;
|
|
|
start_sequence ();
|
start_sequence ();
|
ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
|
ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
|
operands[1], mode, operands[2], mode);
|
operands[1], mode, operands[2], mode);
|
equiv = gen_rtx_TRUNCATE (mode,
|
equiv = gen_rtx_TRUNCATE (mode,
|
gen_rtx_LSHIFTRT (HImode,
|
gen_rtx_LSHIFTRT (HImode,
|
gen_rtx_MULT (HImode,
|
gen_rtx_MULT (HImode,
|
gen_rtx_fmt_e (code, HImode, operands[1]),
|
gen_rtx_fmt_e (code, HImode, operands[1]),
|
gen_rtx_fmt_e (code, HImode, operands[2])),
|
gen_rtx_fmt_e (code, HImode, operands[2])),
|
GEN_INT (32)));
|
GEN_INT (32)));
|
insns = get_insns ();
|
insns = get_insns ();
|
end_sequence ();
|
end_sequence ();
|
emit_libcall_block (insns, operands[0], ret, equiv);
|
emit_libcall_block (insns, operands[0], ret, equiv);
|
}
|
}
|
|
|
|
|
int
|
int
|
c4x_legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
|
c4x_legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
|
{
|
{
|
rtx base = NULL_RTX; /* Base register (AR0-AR7). */
|
rtx base = NULL_RTX; /* Base register (AR0-AR7). */
|
rtx indx = NULL_RTX; /* Index register (IR0,IR1). */
|
rtx indx = NULL_RTX; /* Index register (IR0,IR1). */
|
rtx disp = NULL_RTX; /* Displacement. */
|
rtx disp = NULL_RTX; /* Displacement. */
|
enum rtx_code code;
|
enum rtx_code code;
|
|
|
code = GET_CODE (addr);
|
code = GET_CODE (addr);
|
switch (code)
|
switch (code)
|
{
|
{
|
/* Register indirect with auto increment/decrement. We don't
|
/* Register indirect with auto increment/decrement. We don't
|
allow SP here---push_operand should recognize an operand
|
allow SP here---push_operand should recognize an operand
|
being pushed on the stack. */
|
being pushed on the stack. */
|
|
|
case PRE_DEC:
|
case PRE_DEC:
|
case PRE_INC:
|
case PRE_INC:
|
case POST_DEC:
|
case POST_DEC:
|
if (mode != QImode && mode != QFmode)
|
if (mode != QImode && mode != QFmode)
|
return 0;
|
return 0;
|
|
|
case POST_INC:
|
case POST_INC:
|
base = XEXP (addr, 0);
|
base = XEXP (addr, 0);
|
if (! REG_P (base))
|
if (! REG_P (base))
|
return 0;
|
return 0;
|
break;
|
break;
|
|
|
case PRE_MODIFY:
|
case PRE_MODIFY:
|
case POST_MODIFY:
|
case POST_MODIFY:
|
{
|
{
|
rtx op0 = XEXP (addr, 0);
|
rtx op0 = XEXP (addr, 0);
|
rtx op1 = XEXP (addr, 1);
|
rtx op1 = XEXP (addr, 1);
|
|
|
if (mode != QImode && mode != QFmode)
|
if (mode != QImode && mode != QFmode)
|
return 0;
|
return 0;
|
|
|
if (! REG_P (op0)
|
if (! REG_P (op0)
|
|| (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
|
|| (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
|
return 0;
|
return 0;
|
base = XEXP (op1, 0);
|
base = XEXP (op1, 0);
|
if (! REG_P (base))
|
if (! REG_P (base))
|
return 0;
|
return 0;
|
if (REGNO (base) != REGNO (op0))
|
if (REGNO (base) != REGNO (op0))
|
return 0;
|
return 0;
|
if (REG_P (XEXP (op1, 1)))
|
if (REG_P (XEXP (op1, 1)))
|
indx = XEXP (op1, 1);
|
indx = XEXP (op1, 1);
|
else
|
else
|
disp = XEXP (op1, 1);
|
disp = XEXP (op1, 1);
|
}
|
}
|
break;
|
break;
|
|
|
/* Register indirect. */
|
/* Register indirect. */
|
case REG:
|
case REG:
|
base = addr;
|
base = addr;
|
break;
|
break;
|
|
|
/* Register indirect with displacement or index. */
|
/* Register indirect with displacement or index. */
|
case PLUS:
|
case PLUS:
|
{
|
{
|
rtx op0 = XEXP (addr, 0);
|
rtx op0 = XEXP (addr, 0);
|
rtx op1 = XEXP (addr, 1);
|
rtx op1 = XEXP (addr, 1);
|
enum rtx_code code0 = GET_CODE (op0);
|
enum rtx_code code0 = GET_CODE (op0);
|
|
|
switch (code0)
|
switch (code0)
|
{
|
{
|
case REG:
|
case REG:
|
if (REG_P (op1))
|
if (REG_P (op1))
|
{
|
{
|
base = op0; /* Base + index. */
|
base = op0; /* Base + index. */
|
indx = op1;
|
indx = op1;
|
if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
|
if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
|
{
|
{
|
base = op1;
|
base = op1;
|
indx = op0;
|
indx = op0;
|
}
|
}
|
}
|
}
|
else
|
else
|
{
|
{
|
base = op0; /* Base + displacement. */
|
base = op0; /* Base + displacement. */
|
disp = op1;
|
disp = op1;
|
}
|
}
|
break;
|
break;
|
|
|
default:
|
default:
|
return 0;
|
return 0;
|
}
|
}
|
}
|
}
|
break;
|
break;
|
|
|
/* Direct addressing with DP register. */
|
/* Direct addressing with DP register. */
|
case LO_SUM:
|
case LO_SUM:
|
{
|
{
|
rtx op0 = XEXP (addr, 0);
|
rtx op0 = XEXP (addr, 0);
|
rtx op1 = XEXP (addr, 1);
|
rtx op1 = XEXP (addr, 1);
|
|
|
/* HImode and HFmode direct memory references aren't truly
|
/* HImode and HFmode direct memory references aren't truly
|
offsettable (consider case at end of data page). We
|
offsettable (consider case at end of data page). We
|
probably get better code by loading a pointer and using an
|
probably get better code by loading a pointer and using an
|
indirect memory reference. */
|
indirect memory reference. */
|
if (mode == HImode || mode == HFmode)
|
if (mode == HImode || mode == HFmode)
|
return 0;
|
return 0;
|
|
|
if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
|
if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
|
return 0;
|
return 0;
|
|
|
if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
|
if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
|
return 1;
|
return 1;
|
|
|
if (GET_CODE (op1) == CONST)
|
if (GET_CODE (op1) == CONST)
|
return 1;
|
return 1;
|
return 0;
|
return 0;
|
}
|
}
|
break;
|
break;
|
|
|
/* Direct addressing with some work for the assembler... */
|
/* Direct addressing with some work for the assembler... */
|
case CONST:
|
case CONST:
|
/* Direct addressing. */
|
/* Direct addressing. */
|
case LABEL_REF:
|
case LABEL_REF:
|
case SYMBOL_REF:
|
case SYMBOL_REF:
|
if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
|
if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
|
return 1;
|
return 1;
|
/* These need to be converted to a LO_SUM (...).
|
/* These need to be converted to a LO_SUM (...).
|
LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
|
LEGITIMIZE_RELOAD_ADDRESS will do this during reload. */
|
return 0;
|
return 0;
|
|
|
/* Do not allow direct memory access to absolute addresses.
|
/* Do not allow direct memory access to absolute addresses.
|
This is more pain than it's worth, especially for the
|
This is more pain than it's worth, especially for the
|
small memory model where we can't guarantee that
|
small memory model where we can't guarantee that
|
this address is within the data page---we don't want
|
this address is within the data page---we don't want
|
to modify the DP register in the small memory model,
|
to modify the DP register in the small memory model,
|
even temporarily, since an interrupt can sneak in.... */
|
even temporarily, since an interrupt can sneak in.... */
|
case CONST_INT:
|
case CONST_INT:
|
return 0;
|
return 0;
|
|
|
/* Indirect indirect addressing. */
|
/* Indirect indirect addressing. */
|
case MEM:
|
case MEM:
|
return 0;
|
return 0;
|
|
|
case CONST_DOUBLE:
|
case CONST_DOUBLE:
|
fatal_insn ("using CONST_DOUBLE for address", addr);
|
fatal_insn ("using CONST_DOUBLE for address", addr);
|
|
|
default:
|
default:
|
return 0;
|
return 0;
|
}
|
}
|
|
|
/* Validate the base register. */
|
/* Validate the base register. */
|
if (base)
|
if (base)
|
{
|
{
|
/* Check that the address is offsettable for HImode and HFmode. */
|
/* Check that the address is offsettable for HImode and HFmode. */
|
if (indx && (mode == HImode || mode == HFmode))
|
if (indx && (mode == HImode || mode == HFmode))
|
return 0;
|
return 0;
|
|
|
/* Handle DP based stuff. */
|
/* Handle DP based stuff. */
|
if (REGNO (base) == DP_REGNO)
|
if (REGNO (base) == DP_REGNO)
|
return 1;
|
return 1;
|
if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
|
if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
|
return 0;
|
return 0;
|
else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
|
else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
|
return 0;
|
return 0;
|
}
|
}
|
|
|
/* Now validate the index register. */
|
/* Now validate the index register. */
|
if (indx)
|
if (indx)
|
{
|
{
|
if (GET_CODE (indx) != REG)
|
if (GET_CODE (indx) != REG)
|
return 0;
|
return 0;
|
if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
|
if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
|
return 0;
|
return 0;
|
else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
|
else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
|
return 0;
|
return 0;
|
}
|
}
|
|
|
/* Validate displacement. */
|
/* Validate displacement. */
|
if (disp)
|
if (disp)
|
{
|
{
|
if (GET_CODE (disp) != CONST_INT)
|
if (GET_CODE (disp) != CONST_INT)
|
return 0;
|
return 0;
|
if (mode == HImode || mode == HFmode)
|
if (mode == HImode || mode == HFmode)
|
{
|
{
|
/* The offset displacement must be legitimate. */
|
/* The offset displacement must be legitimate. */
|
if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
|
if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
|
return 0;
|
return 0;
|
}
|
}
|
else
|
else
|
{
|
{
|
if (! IS_DISP8_CONST (INTVAL (disp)))
|
if (! IS_DISP8_CONST (INTVAL (disp)))
|
return 0;
|
return 0;
|
}
|
}
|
/* Can't add an index with a disp. */
|
/* Can't add an index with a disp. */
|
if (indx)
|
if (indx)
|
return 0;
|
return 0;
|
}
|
}
|
return 1;
|
return 1;
|
}
|
}
|
|
|
|
|
rtx
|
rtx
|
c4x_legitimize_address (rtx orig ATTRIBUTE_UNUSED,
|
c4x_legitimize_address (rtx orig ATTRIBUTE_UNUSED,
|
enum machine_mode mode ATTRIBUTE_UNUSED)
|
enum machine_mode mode ATTRIBUTE_UNUSED)
|
{
|
{
|
if (GET_CODE (orig) == SYMBOL_REF
|
if (GET_CODE (orig) == SYMBOL_REF
|
|| GET_CODE (orig) == LABEL_REF)
|
|| GET_CODE (orig) == LABEL_REF)
|
{
|
{
|
if (mode == HImode || mode == HFmode)
|
if (mode == HImode || mode == HFmode)
|
{
|
{
|
/* We need to force the address into
|
/* We need to force the address into
|
a register so that it is offsettable. */
|
a register so that it is offsettable. */
|
rtx addr_reg = gen_reg_rtx (Pmode);
|
rtx addr_reg = gen_reg_rtx (Pmode);
|
emit_move_insn (addr_reg, orig);
|
emit_move_insn (addr_reg, orig);
|
return addr_reg;
|
return addr_reg;
|
}
|
}
|
else
|
else
|
{
|
{
|
rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
|
rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
|
|
|
if (! TARGET_SMALL)
|
if (! TARGET_SMALL)
|
emit_insn (gen_set_ldp (dp_reg, orig));
|
emit_insn (gen_set_ldp (dp_reg, orig));
|
|
|
return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
|
return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
|
}
|
}
|
}
|
}
|
|
|
return NULL_RTX;
|
return NULL_RTX;
|
}
|
}
|
|
|
|
|
/* Provide the costs of an addressing mode that contains ADDR.
|
/* Provide the costs of an addressing mode that contains ADDR.
|
If ADDR is not a valid address, its cost is irrelevant.
|
If ADDR is not a valid address, its cost is irrelevant.
|
This is used in cse and loop optimization to determine
|
This is used in cse and loop optimization to determine
|
if it is worthwhile storing a common address into a register.
|
if it is worthwhile storing a common address into a register.
|
Unfortunately, the C4x address cost depends on other operands. */
|
Unfortunately, the C4x address cost depends on other operands. */
|
|
|
static int
|
static int
|
c4x_address_cost (rtx addr)
|
c4x_address_cost (rtx addr)
|
{
|
{
|
switch (GET_CODE (addr))
|
switch (GET_CODE (addr))
|
{
|
{
|
case REG:
|
case REG:
|
return 1;
|
return 1;
|
|
|
case POST_INC:
|
case POST_INC:
|
case POST_DEC:
|
case POST_DEC:
|
case PRE_INC:
|
case PRE_INC:
|
case PRE_DEC:
|
case PRE_DEC:
|
return 1;
|
return 1;
|
|
|
/* These shouldn't be directly generated. */
|
/* These shouldn't be directly generated. */
|
case SYMBOL_REF:
|
case SYMBOL_REF:
|
case LABEL_REF:
|
case LABEL_REF:
|
case CONST:
|
case CONST:
|
return 10;
|
return 10;
|
|
|
case LO_SUM:
|
case LO_SUM:
|
{
|
{
|
rtx op1 = XEXP (addr, 1);
|
rtx op1 = XEXP (addr, 1);
|
|
|
if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
|
if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
|
return TARGET_SMALL ? 3 : 4;
|
return TARGET_SMALL ? 3 : 4;
|
|
|
if (GET_CODE (op1) == CONST)
|
if (GET_CODE (op1) == CONST)
|
{
|
{
|
rtx offset = const0_rtx;
|
rtx offset = const0_rtx;
|
|
|
op1 = eliminate_constant_term (op1, &offset);
|
op1 = eliminate_constant_term (op1, &offset);
|
|
|
/* ??? These costs need rethinking... */
|
/* ??? These costs need rethinking... */
|
if (GET_CODE (op1) == LABEL_REF)
|
if (GET_CODE (op1) == LABEL_REF)
|
return 3;
|
return 3;
|
|
|
if (GET_CODE (op1) != SYMBOL_REF)
|
if (GET_CODE (op1) != SYMBOL_REF)
|
return 4;
|
return 4;
|
|
|
if (INTVAL (offset) == 0)
|
if (INTVAL (offset) == 0)
|
return 3;
|
return 3;
|
|
|
return 4;
|
return 4;
|
}
|
}
|
fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
|
fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
|
}
|
}
|
break;
|
break;
|
|
|
case PLUS:
|
case PLUS:
|
{
|
{
|
register rtx op0 = XEXP (addr, 0);
|
register rtx op0 = XEXP (addr, 0);
|
register rtx op1 = XEXP (addr, 1);
|
register rtx op1 = XEXP (addr, 1);
|
|
|
if (GET_CODE (op0) != REG)
|
if (GET_CODE (op0) != REG)
|
break;
|
break;
|
|
|
switch (GET_CODE (op1))
|
switch (GET_CODE (op1))
|
{
|
{
|
default:
|
default:
|
break;
|
break;
|
|
|
case REG:
|
case REG:
|
/* This cost for REG+REG must be greater than the cost
|
/* This cost for REG+REG must be greater than the cost
|
for REG if we want autoincrement addressing modes. */
|
for REG if we want autoincrement addressing modes. */
|
return 2;
|
return 2;
|
|
|
case CONST_INT:
|
case CONST_INT:
|
/* The following tries to improve GIV combination
|
/* The following tries to improve GIV combination
|
in strength reduce but appears not to help. */
|
in strength reduce but appears not to help. */
|
if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
|
if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
|
return 1;
|
return 1;
|
|
|
if (IS_DISP1_CONST (INTVAL (op1)))
|
if (IS_DISP1_CONST (INTVAL (op1)))
|
return 1;
|
return 1;
|
|
|
if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
|
if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
|
return 2;
|
return 2;
|
|
|
return 3;
|
return 3;
|
}
|
}
|
}
|
}
|
default:
|
default:
|
break;
|
break;
|
}
|
}
|
|
|
return 4;
|
return 4;
|
}
|
}
|
|
|
|
|
rtx
|
rtx
|
c4x_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
|
c4x_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
|
{
|
{
|
enum machine_mode mode = SELECT_CC_MODE (code, x, y);
|
enum machine_mode mode = SELECT_CC_MODE (code, x, y);
|
rtx cc_reg;
|
rtx cc_reg;
|
|
|
if (mode == CC_NOOVmode
|
if (mode == CC_NOOVmode
|
&& (code == LE || code == GE || code == LT || code == GT))
|
&& (code == LE || code == GE || code == LT || code == GT))
|
return NULL_RTX;
|
return NULL_RTX;
|
|
|
cc_reg = gen_rtx_REG (mode, ST_REGNO);
|
cc_reg = gen_rtx_REG (mode, ST_REGNO);
|
emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
|
emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
|
gen_rtx_COMPARE (mode, x, y)));
|
gen_rtx_COMPARE (mode, x, y)));
|
return cc_reg;
|
return cc_reg;
|
}
|
}
|
|
|
char *
|
char *
|
c4x_output_cbranch (const char *form, rtx seq)
|
c4x_output_cbranch (const char *form, rtx seq)
|
{
|
{
|
int delayed = 0;
|
int delayed = 0;
|
int annultrue = 0;
|
int annultrue = 0;
|
int annulfalse = 0;
|
int annulfalse = 0;
|
rtx delay;
|
rtx delay;
|
char *cp;
|
char *cp;
|
static char str[100];
|
static char str[100];
|
|
|
if (final_sequence)
|
if (final_sequence)
|
{
|
{
|
delay = XVECEXP (final_sequence, 0, 1);
|
delay = XVECEXP (final_sequence, 0, 1);
|
delayed = ! INSN_ANNULLED_BRANCH_P (seq);
|
delayed = ! INSN_ANNULLED_BRANCH_P (seq);
|
annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
|
annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
|
annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
|
annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
|
}
|
}
|
strcpy (str, form);
|
strcpy (str, form);
|
cp = &str [strlen (str)];
|
cp = &str [strlen (str)];
|
if (delayed)
|
if (delayed)
|
{
|
{
|
*cp++ = '%';
|
*cp++ = '%';
|
*cp++ = '#';
|
*cp++ = '#';
|
}
|
}
|
if (annultrue)
|
if (annultrue)
|
{
|
{
|
*cp++ = 'a';
|
*cp++ = 'a';
|
*cp++ = 't';
|
*cp++ = 't';
|
}
|
}
|
if (annulfalse)
|
if (annulfalse)
|
{
|
{
|
*cp++ = 'a';
|
*cp++ = 'a';
|
*cp++ = 'f';
|
*cp++ = 'f';
|
}
|
}
|
*cp++ = '\t';
|
*cp++ = '\t';
|
*cp++ = '%';
|
*cp++ = '%';
|
*cp++ = 'l';
|
*cp++ = 'l';
|
*cp++ = '1';
|
*cp++ = '1';
|
*cp = 0;
|
*cp = 0;
|
return str;
|
return str;
|
}
|
}
|
|
|
void
|
void
|
c4x_print_operand (FILE *file, rtx op, int letter)
|
c4x_print_operand (FILE *file, rtx op, int letter)
|
{
|
{
|
rtx op1;
|
rtx op1;
|
enum rtx_code code;
|
enum rtx_code code;
|
|
|
switch (letter)
|
switch (letter)
|
{
|
{
|
case '#': /* Delayed. */
|
case '#': /* Delayed. */
|
if (final_sequence)
|
if (final_sequence)
|
fprintf (file, "d");
|
fprintf (file, "d");
|
return;
|
return;
|
}
|
}
|
|
|
code = GET_CODE (op);
|
code = GET_CODE (op);
|
switch (letter)
|
switch (letter)
|
{
|
{
|
case 'A': /* Direct address. */
|
case 'A': /* Direct address. */
|
if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
|
if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
|
fprintf (file, "@");
|
fprintf (file, "@");
|
break;
|
break;
|
|
|
case 'H': /* Sethi. */
|
case 'H': /* Sethi. */
|
output_addr_const (file, op);
|
output_addr_const (file, op);
|
return;
|
return;
|
|
|
case 'I': /* Reversed condition. */
|
case 'I': /* Reversed condition. */
|
code = reverse_condition (code);
|
code = reverse_condition (code);
|
break;
|
break;
|
|
|
case 'L': /* Log 2 of constant. */
|
case 'L': /* Log 2 of constant. */
|
if (code != CONST_INT)
|
if (code != CONST_INT)
|
fatal_insn ("c4x_print_operand: %%L inconsistency", op);
|
fatal_insn ("c4x_print_operand: %%L inconsistency", op);
|
fprintf (file, "%d", exact_log2 (INTVAL (op)));
|
fprintf (file, "%d", exact_log2 (INTVAL (op)));
|
return;
|
return;
|
|
|
case 'N': /* Ones complement of small constant. */
|
case 'N': /* Ones complement of small constant. */
|
if (code != CONST_INT)
|
if (code != CONST_INT)
|
fatal_insn ("c4x_print_operand: %%N inconsistency", op);
|
fatal_insn ("c4x_print_operand: %%N inconsistency", op);
|
fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~INTVAL (op));
|
fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~INTVAL (op));
|
return;
|
return;
|
|
|
case 'K': /* Generate ldp(k) if direct address. */
|
case 'K': /* Generate ldp(k) if direct address. */
|
if (! TARGET_SMALL
|
if (! TARGET_SMALL
|
&& code == MEM
|
&& code == MEM
|
&& GET_CODE (XEXP (op, 0)) == LO_SUM
|
&& GET_CODE (XEXP (op, 0)) == LO_SUM
|
&& GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
|
&& GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
|
&& REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
|
&& REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
|
{
|
{
|
op1 = XEXP (XEXP (op, 0), 1);
|
op1 = XEXP (XEXP (op, 0), 1);
|
if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
|
if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
|
{
|
{
|
fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
|
fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
|
output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
|
output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
|
fprintf (file, "\n");
|
fprintf (file, "\n");
|
}
|
}
|
}
|
}
|
return;
|
return;
|
|
|
case 'M': /* Generate ldp(k) if direct address. */
|
case 'M': /* Generate ldp(k) if direct address. */
|
if (! TARGET_SMALL /* Only used in asm statements. */
|
if (! TARGET_SMALL /* Only used in asm statements. */
|
&& code == MEM
|
&& code == MEM
|
&& (GET_CODE (XEXP (op, 0)) == CONST
|
&& (GET_CODE (XEXP (op, 0)) == CONST
|
|| GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
|
|| GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
|
{
|
{
|
fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
|
fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
|
output_address (XEXP (op, 0));
|
output_address (XEXP (op, 0));
|
fprintf (file, "\n\t");
|
fprintf (file, "\n\t");
|
}
|
}
|
return;
|
return;
|
|
|
case 'O': /* Offset address. */
|
case 'O': /* Offset address. */
|
if (code == MEM && c4x_autoinc_operand (op, Pmode))
|
if (code == MEM && c4x_autoinc_operand (op, Pmode))
|
break;
|
break;
|
else if (code == MEM)
|
else if (code == MEM)
|
output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
|
output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
|
else if (code == REG)
|
else if (code == REG)
|
fprintf (file, "%s", reg_names[REGNO (op) + 1]);
|
fprintf (file, "%s", reg_names[REGNO (op) + 1]);
|
else
|
else
|
fatal_insn ("c4x_print_operand: %%O inconsistency", op);
|
fatal_insn ("c4x_print_operand: %%O inconsistency", op);
|
return;
|
return;
|
|
|
case 'C': /* Call. */
|
case 'C': /* Call. */
|
break;
|
break;
|
|
|
case 'U': /* Call/callu. */
|
case 'U': /* Call/callu. */
|
if (code != SYMBOL_REF)
|
if (code != SYMBOL_REF)
|
fprintf (file, "u");
|
fprintf (file, "u");
|
return;
|
return;
|
|
|
default:
|
default:
|
break;
|
break;
|
}
|
}
|
|
|
switch (code)
|
switch (code)
|
{
|
{
|
case REG:
|
case REG:
|
if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
|
if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
|
&& ! TARGET_TI)
|
&& ! TARGET_TI)
|
fprintf (file, "%s", float_reg_names[REGNO (op)]);
|
fprintf (file, "%s", float_reg_names[REGNO (op)]);
|
else
|
else
|
fprintf (file, "%s", reg_names[REGNO (op)]);
|
fprintf (file, "%s", reg_names[REGNO (op)]);
|
break;
|
break;
|
|
|
case MEM:
|
case MEM:
|
output_address (XEXP (op, 0));
|
output_address (XEXP (op, 0));
|
break;
|
break;
|
|
|
case CONST_DOUBLE:
|
case CONST_DOUBLE:
|
{
|
{
|
char str[64];
|
char str[64];
|
|
|
real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (op),
|
real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (op),
|
sizeof (str), 0, 1);
|
sizeof (str), 0, 1);
|
fprintf (file, "%s", str);
|
fprintf (file, "%s", str);
|
}
|
}
|
break;
|
break;
|
|
|
case CONST_INT:
|
case CONST_INT:
|
fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (op));
|
fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (op));
|
break;
|
break;
|
|
|
case NE:
|
case NE:
|
fprintf (file, "ne");
|
fprintf (file, "ne");
|
break;
|
break;
|
|
|
case EQ:
|
case EQ:
|
fprintf (file, "eq");
|
fprintf (file, "eq");
|
break;
|
break;
|
|
|
case GE:
|
case GE:
|
fprintf (file, "ge");
|
fprintf (file, "ge");
|
break;
|
break;
|
|
|
case GT:
|
case GT:
|
fprintf (file, "gt");
|
fprintf (file, "gt");
|
break;
|
break;
|
|
|
case LE:
|
case LE:
|
fprintf (file, "le");
|
fprintf (file, "le");
|
break;
|
break;
|
|
|
case LT:
|
case LT:
|
fprintf (file, "lt");
|
fprintf (file, "lt");
|
break;
|
break;
|
|
|
case GEU:
|
case GEU:
|
fprintf (file, "hs");
|
fprintf (file, "hs");
|
break;
|
break;
|
|
|
case GTU:
|
case GTU:
|
fprintf (file, "hi");
|
fprintf (file, "hi");
|
break;
|
break;
|
|
|
case LEU:
|
case LEU:
|
fprintf (file, "ls");
|
fprintf (file, "ls");
|
break;
|
break;
|
|
|
case LTU:
|
case LTU:
|
fprintf (file, "lo");
|
fprintf (file, "lo");
|
break;
|
break;
|
|
|
case SYMBOL_REF:
|
case SYMBOL_REF:
|
output_addr_const (file, op);
|
output_addr_const (file, op);
|
break;
|
break;
|
|
|
case CONST:
|
case CONST:
|
output_addr_const (file, XEXP (op, 0));
|
output_addr_const (file, XEXP (op, 0));
|
break;
|
break;
|
|
|
case CODE_LABEL:
|
case CODE_LABEL:
|
break;
|
break;
|
|
|
default:
|
default:
|
fatal_insn ("c4x_print_operand: Bad operand case", op);
|
fatal_insn ("c4x_print_operand: Bad operand case", op);
|
break;
|
break;
|
}
|
}
|
}
|
}
|
|
|
|
|
/* Print ADDR to FILE using TI C3x/C4x assembler addressing syntax.
   Handles plain register-indirect, all pre/post increment/decrement
   and modify forms, base+index / base+displacement, LO_SUM via the
   DP register, and direct (@symbol) addresses.  Anything else is a
   fatal error.  */

void
c4x_print_operand_address (FILE *file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      /* Plain register indirect: *ARn.  */
      fprintf (file, "*%s", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      /* Pre-decrement: *--ARn.  */
      fprintf (file, "*--%s", reg_names[REGNO (addr, 0) == REGNO (XEXP (addr, 0)) ? REGNO (XEXP (addr, 0)) : REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      /* Post-increment: *ARn++.  */
      fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_MODIFY:
      {
	/* XEXP (addr, 1) is (plus/minus reg step); op0 is the base
	   register, op1 the register or constant step.  */
	rtx op0 = XEXP (XEXP (addr, 1), 0);
	rtx op1 = XEXP (XEXP (addr, 1), 1);

	if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
	  fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
	  fprintf (file, "*%s++(" HOST_WIDE_INT_PRINT_DEC ")",
		   reg_names[REGNO (op0)], INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
	  /* A negative step is printed as a post-decrement with the
	     magnitude of the step.  */
	  fprintf (file, "*%s--(" HOST_WIDE_INT_PRINT_DEC ")",
		   reg_names[REGNO (op0)], -INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
	  fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else
	  fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
      }
      break;

    case PRE_MODIFY:
      {
	/* Same decomposition as POST_MODIFY, but the update happens
	   before the access, so the ++/-- prefixes the register.  */
	rtx op0 = XEXP (XEXP (addr, 1), 0);
	rtx op1 = XEXP (XEXP (addr, 1), 1);

	if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
	  fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
	  fprintf (file, "*++%s(" HOST_WIDE_INT_PRINT_DEC ")",
		   reg_names[REGNO (op0)], INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
	  fprintf (file, "*--%s(" HOST_WIDE_INT_PRINT_DEC ")",
		   reg_names[REGNO (op0)], -INTVAL (op1));
	else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
	  fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
		   reg_names[REGNO (op1)]);
	else
	  fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
      }
      break;

    case PRE_INC:
      /* Pre-increment: *++ARn.  */
      fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_DEC:
      /* Post-decrement: *ARn--.  */
      fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:			/* Indirect with displacement.  */
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);

	if (REG_P (op0))
	  {
	    if (REG_P (op1))
	      {
		/* For reg+reg the index register goes in the
		   parentheses, whichever operand it is.  */
		if (IS_INDEX_REG (op0))
		  {
		    fprintf (file, "*+%s(%s)",
			     reg_names[REGNO (op1)],
			     reg_names[REGNO (op0)]);	/* Index + base.  */
		  }
		else
		  {
		    fprintf (file, "*+%s(%s)",
			     reg_names[REGNO (op0)],
			     reg_names[REGNO (op1)]);	/* Base + index.  */
		  }
	      }
	    else if (INTVAL (op1) < 0)
	      {
		fprintf (file, "*-%s(" HOST_WIDE_INT_PRINT_DEC ")",
			 reg_names[REGNO (op0)],
			 -INTVAL (op1));	/* Base - displacement.  */
	      }
	    else
	      {
		fprintf (file, "*+%s(" HOST_WIDE_INT_PRINT_DEC ")",
			 reg_names[REGNO (op0)],
			 INTVAL (op1));	/* Base + displacement.  */
	      }
	  }
	else
	  fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case LO_SUM:
      {
	rtx op0 = XEXP (addr, 0);
	rtx op1 = XEXP (addr, 1);

	/* Only (lo_sum DP symbol) is meaningful here; print the
	   symbolic part as a direct address.  */
	if (REG_P (op0) && REGNO (op0) == DP_REGNO)
	  c4x_print_operand_address (file, op1);
	else
	  fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      }
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Direct addressing: @symbol.  */
      fprintf (file, "@");
      output_addr_const (file, addr);
      break;

      /* We shouldn't access CONST_INT addresses.  */
    case CONST_INT:

    default:
      fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
      break;
    }
}
|
|
|
|
|
/* Return nonzero if the floating point operand will fit
|
/* Return nonzero if the floating point operand will fit
|
in the immediate field. */
|
in the immediate field. */
|
|
|
/* Return nonzero if the floating point constant OP will fit in the
   16-bit short-float immediate field of a C3x/C4x instruction.  The
   target representation is examined directly: the immediate field
   holds an 8-bit exponent and an 8-bit signed mantissa (the low
   mantissa bits must all be zero).  */

int
c4x_immed_float_p (rtx op)
{
  long convval[2];
  int exponent;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, op);
  /* HFmode uses the 40-bit target double layout; anything else is
     converted to the 32-bit target single, with the second word
     cleared so the precision test below works uniformly.  */
  if (GET_MODE (op) == HFmode)
    REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
  else
    {
      REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
      convval[1] = 0;
    }

  /* Sign extend exponent.  */
  exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
  if (exponent == -128)
    return 1;			/* 0.0  */
  /* Any bits below the top 12 mantissa bits, or in the second word,
     would be lost in the short immediate form.  */
  if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
    return 0;			/* Precision doesn't fit.  */
  /* A 4-bit exponent field remains: -7 .. 7.  */
  return (exponent <= 7)	/* Positive exp.  */
    && (exponent >= -7);	/* Negative exp.  */
}
|
|
|
|
|
/* The last instruction in a repeat block cannot be a Bcond, DBcound,
|
/* The last instruction in a repeat block cannot be a Bcond, DBcound,
|
CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
|
CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
|
|
|
None of the last four instructions from the bottom of the block can
|
None of the last four instructions from the bottom of the block can
|
be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
|
be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
|
BcondAT or RETIcondD.
|
BcondAT or RETIcondD.
|
|
|
This routine scans the four previous insns for a jump insn, and if
|
This routine scans the four previous insns for a jump insn, and if
|
one is found, returns 1 so that we bung in a nop instruction.
|
one is found, returns 1 so that we bung in a nop instruction.
|
This simple minded strategy will add a nop, when it may not
|
This simple minded strategy will add a nop, when it may not
|
be required. Say when there is a JUMP_INSN near the end of the
|
be required. Say when there is a JUMP_INSN near the end of the
|
block that doesn't get converted into a delayed branch.
|
block that doesn't get converted into a delayed branch.
|
|
|
Note that we cannot have a call insn, since we don't generate
|
Note that we cannot have a call insn, since we don't generate
|
repeat loops with calls in them (although I suppose we could, but
|
repeat loops with calls in them (although I suppose we could, but
|
there's no benefit.)
|
there's no benefit.)
|
|
|
!!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
|
!!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
|
|
|
/* Return 1 if a NOP is needed before the rptb_end insn INSN because
   one of the last four real insns of the repeat block is a jump (or
   because a label sits at the end of the block); return 0 otherwise.
   See the block comment above for the hardware restriction this
   enforces.  */

int
c4x_rptb_nop_p (rtx insn)
{
  rtx start_label;
  int i;

  /* Extract the start label from the jump pattern (rptb_end).  */
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);

  /* If there is a label at the end of the loop we must insert
     a NOP.  */
  do {
    insn = previous_insn (insn);
  } while (GET_CODE (insn) == NOTE
	   || GET_CODE (insn) == USE
	   || GET_CODE (insn) == CLOBBER);
  if (GET_CODE (insn) == CODE_LABEL)
    return 1;

  /* Examine the last four real insns of the block.  */
  for (i = 0; i < 4; i++)
    {
      /* Search back for prev non-note and non-label insn.  */
      while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
	     || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
	{
	  /* Reached the top of the loop: a NOP is only required if
	     the loop body is empty (i == 0).  */
	  if (insn == start_label)
	    return i == 0;

	  insn = previous_insn (insn);
	};

      /* If we have a jump instruction we should insert a NOP. If we
	 hit repeat block top we should only insert a NOP if the loop
	 is empty.  */
      if (GET_CODE (insn) == JUMP_INSN)
	return 1;
      insn = previous_insn (insn);
    }
  return 0;
}
|
|
|
|
|
/* The C4x looping instruction needs to be emitted at the top of the
|
/* The C4x looping instruction needs to be emitted at the top of the
|
loop. Emitting the true RTL for a looping instruction at the top of
|
loop. Emitting the true RTL for a looping instruction at the top of
|
the loop can cause problems with flow analysis. So instead, a dummy
|
the loop can cause problems with flow analysis. So instead, a dummy
|
doloop insn is emitted at the end of the loop. This routine checks
|
doloop insn is emitted at the end of the loop. This routine checks
|
for the presence of this doloop insn and then searches back to the
|
for the presence of this doloop insn and then searches back to the
|
top of the loop, where it inserts the true looping insn (provided
|
top of the loop, where it inserts the true looping insn (provided
|
there are no instructions in the loop which would cause problems).
|
there are no instructions in the loop which would cause problems).
|
Any additional labels can be emitted at this point. In addition, if
|
Any additional labels can be emitted at this point. In addition, if
|
the desired loop count register was not allocated, this routine does
|
the desired loop count register was not allocated, this routine does
|
nothing.
|
nothing.
|
|
|
Before we can create a repeat block looping instruction we have to
|
Before we can create a repeat block looping instruction we have to
|
verify that there are no jumps outside the loop and no jumps outside
|
verify that there are no jumps outside the loop and no jumps outside
|
the loop go into this loop. This can happen in the basic blocks reorder
|
the loop go into this loop. This can happen in the basic blocks reorder
|
pass. The C4x cpu cannot handle this. */
|
pass. The C4x cpu cannot handle this. */
|
|
|
static int
|
static int
|
c4x_label_ref_used_p (rtx x, rtx code_label)
|
c4x_label_ref_used_p (rtx x, rtx code_label)
|
{
|
{
|
enum rtx_code code;
|
enum rtx_code code;
|
int i, j;
|
int i, j;
|
const char *fmt;
|
const char *fmt;
|
|
|
if (x == 0)
|
if (x == 0)
|
return 0;
|
return 0;
|
|
|
code = GET_CODE (x);
|
code = GET_CODE (x);
|
if (code == LABEL_REF)
|
if (code == LABEL_REF)
|
return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
|
return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
|
|
|
fmt = GET_RTX_FORMAT (code);
|
fmt = GET_RTX_FORMAT (code);
|
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
|
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
|
{
|
{
|
if (fmt[i] == 'e')
|
if (fmt[i] == 'e')
|
{
|
{
|
if (c4x_label_ref_used_p (XEXP (x, i), code_label))
|
if (c4x_label_ref_used_p (XEXP (x, i), code_label))
|
return 1;
|
return 1;
|
}
|
}
|
else if (fmt[i] == 'E')
|
else if (fmt[i] == 'E')
|
for (j = XVECLEN (x, i) - 1; j >= 0; j--)
|
for (j = XVECLEN (x, i) - 1; j >= 0; j--)
|
if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
|
if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
|
return 1;
|
return 1;
|
}
|
}
|
return 0;
|
return 0;
|
}
|
}
|
|
|
|
|
/* Return 1 if the loop delimited by START_LABEL and the rptb_end insn
   INSN may safely be turned into a hardware repeat block: no jump from
   outside the block targets a label inside it, and no jump inside the
   block targets a label outside it.  Return 0 otherwise.  */

static int
c4x_rptb_valid_p (rtx insn, rtx start_label)
{
  rtx end = insn;
  rtx start;
  rtx tmp;

  /* Find the start label.  */
  for (; insn; insn = PREV_INSN (insn))
    if (insn == start_label)
      break;

  /* Not found, so we cannot use a rptb or rpts.  The label was
     probably moved by the basic block reorder pass.  */
  if (! insn)
    return 0;

  start = insn;
  /* If any jump jumps inside this block then we must fail.
     First scan the labels before the block...  */
  for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	}
    }
  /* ...then the labels after the block.  */
  for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	}
    }
  /* If any jump jumps outside this block then we must fail: for each
     label inside the block, check all jumps after and before it.  */
  for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	  for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
	    if (GET_CODE (tmp) == JUMP_INSN
		&& c4x_label_ref_used_p (tmp, insn))
	      return 0;
	}
    }

  /* All checks OK.  */
  return 1;
}
|
|
|
|
|
/* INSN is a dummy rptb_end insn at the bottom of a loop.  Insert the
   real repeat-block looping insn (rptb_top or rpts_top) at the top of
   the loop, together with fresh start and end labels.  If the loop
   counter did not get allocated to RC, do nothing; if the loop is not
   a valid repeat block, fall back to an explicit decrement, compare
   and branch.  */

void
c4x_rptb_insert (rtx insn)
{
  rtx end_label;
  rtx start_label;
  rtx new_start_label;
  rtx count_reg;

  /* If the count register has not been allocated to RC, say if
     there is a movmem pattern in the loop, then do not insert a
     RPTB instruction.  Instead we emit a decrement and branch
     at the end of the loop.  */
  count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
  if (REGNO (count_reg) != RC_REGNO)
    return;

  /* Extract the start label from the jump pattern (rptb_end).  */
  start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);

  if (! c4x_rptb_valid_p (insn, start_label))
    {
      /* We cannot use the rptb insn.  Replace it so reorg can use
	 the delay slots of the jump insn.  */
      emit_insn_before (gen_addqi3 (count_reg, count_reg, constm1_rtx), insn);
      emit_insn_before (gen_cmpqi (count_reg, const0_rtx), insn);
      emit_insn_before (gen_bge (start_label), insn);
      LABEL_NUSES (start_label)++;
      delete_insn (insn);
      return;
    }

  /* Mark the end of the repeat block with a fresh label.  */
  end_label = gen_label_rtx ();
  LABEL_NUSES (end_label)++;
  emit_label_after (end_label, insn);

  new_start_label = gen_label_rtx ();
  LABEL_NUSES (new_start_label)++;

  /* Walk back to the old start label, redirecting any jump that
     targeted it to the new start label on the way.  */
  for (; insn; insn = PREV_INSN (insn))
    {
      if (insn == start_label)
	break;
      if (GET_CODE (insn) == JUMP_INSN &&
	  JUMP_LABEL (insn) == start_label)
	redirect_jump (insn, new_start_label, 0);
    }
  if (! insn)
    fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);

  emit_label_after (new_start_label, insn);

  /* Use the single-insn repeat form (rpts) when the loop qualifies
     and the target allows it; otherwise emit a repeat block (rptb).  */
  if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
    emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
  else
    emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
  /* The old start label may now be unused.  */
  if (LABEL_NUSES (start_label) == 0)
    delete_insn (start_label);
}
|
|
|
|
|
/* We need to use direct addressing for large constants and addresses
|
/* We need to use direct addressing for large constants and addresses
|
that cannot fit within an instruction. We must check for these
|
that cannot fit within an instruction. We must check for these
|
after after the final jump optimization pass, since this may
|
after after the final jump optimization pass, since this may
|
introduce a local_move insn for a SYMBOL_REF. This pass
|
introduce a local_move insn for a SYMBOL_REF. This pass
|
must come before delayed branch slot filling since it can generate
|
must come before delayed branch slot filling since it can generate
|
additional instructions.
|
additional instructions.
|
|
|
This function also fixes up RTPB style loops that didn't get RC
|
This function also fixes up RTPB style loops that didn't get RC
|
allocated as the loop counter. */
|
allocated as the loop counter. */
|
|
|
/* Machine-dependent reorg pass: walk every insn, install real RPTB
   looping insns for rptb_end patterns, and split insns so that later
   passes (force_const_mem for load_immed_address) can operate on the
   parts.  See the block comment above for ordering constraints.  */

static void
c4x_reorg (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Look for insn.  */
      if (INSN_P (insn))
	{
	  int insn_code_number;
	  rtx old;

	  insn_code_number = recog_memoized (insn);

	  /* Skip insns that do not match any pattern.  */
	  if (insn_code_number < 0)
	    continue;

	  /* Insert the RTX for RPTB at the top of the loop
	     and a label at the end of the loop.  */
	  if (insn_code_number == CODE_FOR_rptb_end)
	    c4x_rptb_insert(insn);

	  /* We need to split the insn here.  Otherwise the calls to
	     force_const_mem will not work for load_immed_address.  */
	  old = insn;

	  /* Don't split the insn if it has been deleted.  */
	  if (! INSN_DELETED_P (old))
	    insn = try_split (PATTERN(old), old, 1);

	  /* When not optimizing, the old insn will be still left around
	     with only the 'deleted' bit set.  Transform it into a note
	     to avoid confusion of subsequent processing.  */
	  if (INSN_DELETED_P (old))
	    {
	      PUT_CODE (old, NOTE);
	      NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (old) = 0;
	    }
	}
    }
}
|
|
|
|
|
int
|
int
|
c4x_a_register (rtx op)
|
c4x_a_register (rtx op)
|
{
|
{
|
return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
|
return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
|
}
|
}
|
|
|
|
|
int
|
int
|
c4x_x_register (rtx op)
|
c4x_x_register (rtx op)
|
{
|
{
|
return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
|
return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
|
}
|
}
|
|
|
|
|
static int
|
static int
|
c4x_immed_int_constant (rtx op)
|
c4x_immed_int_constant (rtx op)
|
{
|
{
|
if (GET_CODE (op) != CONST_INT)
|
if (GET_CODE (op) != CONST_INT)
|
return 0;
|
return 0;
|
|
|
return GET_MODE (op) == VOIDmode
|
return GET_MODE (op) == VOIDmode
|
|| GET_MODE_CLASS (GET_MODE (op)) == MODE_INT
|
|| GET_MODE_CLASS (GET_MODE (op)) == MODE_INT
|
|| GET_MODE_CLASS (GET_MODE (op)) == MODE_PARTIAL_INT;
|
|| GET_MODE_CLASS (GET_MODE (op)) == MODE_PARTIAL_INT;
|
}
|
}
|
|
|
|
|
static int
|
static int
|
c4x_immed_float_constant (rtx op)
|
c4x_immed_float_constant (rtx op)
|
{
|
{
|
if (GET_CODE (op) != CONST_DOUBLE)
|
if (GET_CODE (op) != CONST_DOUBLE)
|
return 0;
|
return 0;
|
|
|
/* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
|
/* Do not check if the CONST_DOUBLE is in memory. If there is a MEM
|
present this only means that a MEM rtx has been generated. It does
|
present this only means that a MEM rtx has been generated. It does
|
not mean the rtx is really in memory. */
|
not mean the rtx is really in memory. */
|
|
|
return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
|
return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
|
}
|
}
|
|
|
|
|
int
|
int
|
c4x_shiftable_constant (rtx op)
|
c4x_shiftable_constant (rtx op)
|
{
|
{
|
int i;
|
int i;
|
int mask;
|
int mask;
|
int val = INTVAL (op);
|
int val = INTVAL (op);
|
|
|
for (i = 0; i < 16; i++)
|
for (i = 0; i < 16; i++)
|
{
|
{
|
if (val & (1 << i))
|
if (val & (1 << i))
|
break;
|
break;
|
}
|
}
|
mask = ((0xffff >> i) << 16) | 0xffff;
|
mask = ((0xffff >> i) << 16) | 0xffff;
|
if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
|
if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
|
: (val >> i) & mask))
|
: (val >> i) & mask))
|
return i;
|
return i;
|
return -1;
|
return -1;
|
}
|
}
|
|
|
|
|
int
|
int
|
c4x_H_constant (rtx op)
|
c4x_H_constant (rtx op)
|
{
|
{
|
return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
|
return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
|
}
|
}
|
|
|
|
|
int
|
int
|
c4x_I_constant (rtx op)
|
c4x_I_constant (rtx op)
|
{
|
{
|
return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
|
return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
|
}
|
}
|
|
|
|
|
int
|
int
|
c4x_J_constant (rtx op)
|
c4x_J_constant (rtx op)
|
{
|
{
|
if (TARGET_C3X)
|
if (TARGET_C3X)
|
return 0;
|
return 0;
|
return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
|
return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
|
}
|
}
|
|
|
|
|
int
|
int
|
c4x_K_constant (rtx op)
|
c4x_K_constant (rtx op)
|
{
|
{
|
if (TARGET_C3X || ! c4x_immed_int_constant (op))
|
if (TARGET_C3X || ! c4x_immed_int_constant (op))
|
return 0;
|
return 0;
|
return IS_INT5_CONST (INTVAL (op));
|
return IS_INT5_CONST (INTVAL (op));
|
}
|
}
|
|
|
|
|
int
|
int
|
c4x_L_constant (rtx op)
|
c4x_L_constant (rtx op)
|
{
|
{
|
return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
|
return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
|
}
|
}
|
|
|
|
|
int
|
int
|
c4x_N_constant (rtx op)
|
c4x_N_constant (rtx op)
|
{
|
{
|
return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
|
return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
|
}
|
}
|
|
|
|
|
int
|
int
|
c4x_O_constant (rtx op)
|
c4x_O_constant (rtx op)
|
{
|
{
|
return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
|
return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
|
}
|
}
|
|
|
|
|
/* The constraints do not have to check the register class,
|
/* The constraints do not have to check the register class,
|
except when needed to discriminate between the constraints.
|
except when needed to discriminate between the constraints.
|
The operand has been checked by the predicates to be valid. */
|
The operand has been checked by the predicates to be valid. */
|
|
|
/* ARx + 9-bit signed const or IRn
|
/* ARx + 9-bit signed const or IRn
|
*ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-Arx(IRn) for -256 < n < 256
|
*ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-Arx(IRn) for -256 < n < 256
|
We don't include the pre/post inc/dec forms here since
|
We don't include the pre/post inc/dec forms here since
|
they are handled by the <> constraints. */
|
they are handled by the <> constraints. */
|
|
|
/* Constraint Q: return nonzero if OP is a MEM whose address is
   register indirect (*ARx), register + index register, or register
   plus a 9-bit signed displacement (8-bit for HImode/HFmode which
   must remain offsettable).  Pre/post inc/dec forms are handled by
   the <> constraints instead — see the block comment above.  */

int
c4x_Q_constraint (rtx op)
{
  enum machine_mode mode = GET_MODE (op);

  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:
      /* Plain register indirect: *ARx.  */
      return 1;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (! REG_P (op0))
	  return 0;

	/* Base register plus index register.  */
	if (REG_P (op1))
	  return 1;

	if (GET_CODE (op1) != CONST_INT)
	  return 0;

	/* HImode and HFmode must be offsettable.  */
	if (mode == HImode || mode == HFmode)
	  return IS_DISP8_OFF_CONST (INTVAL (op1));

	return IS_DISP8_CONST (INTVAL (op1));
      }
      break;

    default:
      break;
    }
  return 0;
}
|
|
|
|
|
/* ARx + 5-bit unsigned const
|
/* ARx + 5-bit unsigned const
|
*ARx, *+ARx(n) for n < 32. */
|
*ARx, *+ARx(n) for n < 32. */
|
|
|
int
|
int
|
c4x_R_constraint (rtx op)
|
c4x_R_constraint (rtx op)
|
{
|
{
|
enum machine_mode mode = GET_MODE (op);
|
enum machine_mode mode = GET_MODE (op);
|
|
|
if (TARGET_C3X)
|
if (TARGET_C3X)
|
return 0;
|
return 0;
|
if (GET_CODE (op) != MEM)
|
if (GET_CODE (op) != MEM)
|
return 0;
|
return 0;
|
op = XEXP (op, 0);
|
op = XEXP (op, 0);
|
switch (GET_CODE (op))
|
switch (GET_CODE (op))
|
{
|
{
|
case REG:
|
case REG:
|
return 1;
|
return 1;
|
|
|
case PLUS:
|
case PLUS:
|
{
|
{
|
rtx op0 = XEXP (op, 0);
|
rtx op0 = XEXP (op, 0);
|
rtx op1 = XEXP (op, 1);
|
rtx op1 = XEXP (op, 1);
|
|
|
if (! REG_P (op0))
|
if (! REG_P (op0))
|
return 0;
|
return 0;
|
|
|
if (GET_CODE (op1) != CONST_INT)
|
if (GET_CODE (op1) != CONST_INT)
|
return 0;
|
return 0;
|
|
|
/* HImode and HFmode must be offsettable. */
|
/* HImode and HFmode must be offsettable. */
|
if (mode == HImode || mode == HFmode)
|
if (mode == HImode || mode == HFmode)
|
return IS_UINT5_CONST (INTVAL (op1) + 1);
|
return IS_UINT5_CONST (INTVAL (op1) + 1);
|
|
|
return IS_UINT5_CONST (INTVAL (op1));
|
return IS_UINT5_CONST (INTVAL (op1));
|
}
|
}
|
break;
|
break;
|
|
|
default:
|
default:
|
break;
|
break;
|
}
|
}
|
return 0;
|
return 0;
|
}
|
}
|
|
|
|
|
static int
|
static int
|
c4x_R_indirect (rtx op)
|
c4x_R_indirect (rtx op)
|
{
|
{
|
enum machine_mode mode = GET_MODE (op);
|
enum machine_mode mode = GET_MODE (op);
|
|
|
if (TARGET_C3X || GET_CODE (op) != MEM)
|
if (TARGET_C3X || GET_CODE (op) != MEM)
|
return 0;
|
return 0;
|
|
|
op = XEXP (op, 0);
|
op = XEXP (op, 0);
|
switch (GET_CODE (op))
|
switch (GET_CODE (op))
|
{
|
{
|
case REG:
|
case REG:
|
return IS_ADDR_OR_PSEUDO_REG (op);
|
return IS_ADDR_OR_PSEUDO_REG (op);
|
|
|
case PLUS:
|
case PLUS:
|
{
|
{
|
rtx op0 = XEXP (op, 0);
|
rtx op0 = XEXP (op, 0);
|
rtx op1 = XEXP (op, 1);
|
rtx op1 = XEXP (op, 1);
|
|
|
/* HImode and HFmode must be offsettable. */
|
/* HImode and HFmode must be offsettable. */
|
if (mode == HImode || mode == HFmode)
|
if (mode == HImode || mode == HFmode)
|
return IS_ADDR_OR_PSEUDO_REG (op0)
|
return IS_ADDR_OR_PSEUDO_REG (op0)
|
&& GET_CODE (op1) == CONST_INT
|
&& GET_CODE (op1) == CONST_INT
|
&& IS_UINT5_CONST (INTVAL (op1) + 1);
|
&& IS_UINT5_CONST (INTVAL (op1) + 1);
|
|
|
return REG_P (op0)
|
return REG_P (op0)
|
&& IS_ADDR_OR_PSEUDO_REG (op0)
|
&& IS_ADDR_OR_PSEUDO_REG (op0)
|
&& GET_CODE (op1) == CONST_INT
|
&& GET_CODE (op1) == CONST_INT
|
&& IS_UINT5_CONST (INTVAL (op1));
|
&& IS_UINT5_CONST (INTVAL (op1));
|
}
|
}
|
break;
|
break;
|
|
|
default:
|
default:
|
break;
|
break;
|
}
|
}
|
return 0;
|
return 0;
|
}
|
}
|
|
|
|
|
/* ARx + 1-bit unsigned const or IRn
   *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
   We don't include the pre/post inc/dec forms here since
   they are handled by the <> constraints.  */
|
|
|
int
c4x_S_constraint (rtx op)
{
  enum machine_mode mode = GET_MODE (op);
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:
      /* Plain register indirect: *ARx.  */
      return 1;

    case PRE_MODIFY:
    case POST_MODIFY:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	/* The side effect must have the canonical shape
	   (pre/post_modify base (plus/minus base x)).  */
	if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
	    || (op0 != XEXP (op1, 0)))
	  return 0;

	op0 = XEXP (op1, 0);
	op1 = XEXP (op1, 1);
	/* Only a register modifier (IRn) is acceptable here.  */
	return REG_P (op0) && REG_P (op1);
	/* Pre or post_modify with a displacement of 0 or 1
	   should not be generated.  */
      }
      break;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (!REG_P (op0))
	  return 0;

	/* Base plus index register: *+ARx(IRn).  */
	if (REG_P (op1))
	  return 1;

	if (GET_CODE (op1) != CONST_INT)
	  return 0;

	/* HImode and HFmode must be offsettable.  */
	if (mode == HImode || mode == HFmode)
	  return IS_DISP1_OFF_CONST (INTVAL (op1));

	return IS_DISP1_CONST (INTVAL (op1));
      }
      break;

    default:
      break;
    }
  return 0;
}
|
|
|
|
|
/* Like c4x_S_constraint, but also accepts the pre/post inc/dec forms
   and requires address (ARx) / index (IRn) register classes (or
   pseudos) rather than any register.  */

int
c4x_S_indirect (rtx op)
{
  enum machine_mode mode = GET_MODE (op);
  if (GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case PRE_DEC:
    case POST_DEC:
      /* Decrement forms are only valid for single-word modes.  */
      if (mode != QImode && mode != QFmode)
	return 0;
      /* Fall through.  */
    case PRE_INC:
    case POST_INC:
      op = XEXP (op, 0);
      /* Fall through: check the underlying register.  */

    case REG:
      return IS_ADDR_OR_PSEUDO_REG (op);

    case PRE_MODIFY:
    case POST_MODIFY:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (mode != QImode && mode != QFmode)
	  return 0;

	/* Require the canonical (pre/post_modify base
	   (plus/minus base x)) shape.  */
	if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
	    || (op0 != XEXP (op1, 0)))
	  return 0;

	op0 = XEXP (op1, 0);
	op1 = XEXP (op1, 1);
	return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
	  && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
	/* Pre or post_modify with a displacement of 0 or 1
	   should not be generated.  */
      }

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (REG_P (op0))
	  {
	    /* HImode and HFmode must be offsettable.  */
	    if (mode == HImode || mode == HFmode)
	      return IS_ADDR_OR_PSEUDO_REG (op0)
		&& GET_CODE (op1) == CONST_INT
		&& IS_DISP1_OFF_CONST (INTVAL (op1));

	    /* Base + index may appear in either order.  */
	    if (REG_P (op1))
	      return (IS_INDEX_OR_PSEUDO_REG (op1)
		      && IS_ADDR_OR_PSEUDO_REG (op0))
		|| (IS_ADDR_OR_PSEUDO_REG (op1)
		    && IS_INDEX_OR_PSEUDO_REG (op0));

	    return IS_ADDR_OR_PSEUDO_REG (op0)
	      && GET_CODE (op1) == CONST_INT
	      && IS_DISP1_CONST (INTVAL (op1));
	  }
      }
      break;

    default:
      break;
    }
  return 0;
}
|
|
|
|
|
/* Direct memory operand. */
|
/* Direct memory operand. */
|
|
|
int
|
int
|
c4x_T_constraint (rtx op)
|
c4x_T_constraint (rtx op)
|
{
|
{
|
if (GET_CODE (op) != MEM)
|
if (GET_CODE (op) != MEM)
|
return 0;
|
return 0;
|
op = XEXP (op, 0);
|
op = XEXP (op, 0);
|
|
|
if (GET_CODE (op) != LO_SUM)
|
if (GET_CODE (op) != LO_SUM)
|
{
|
{
|
/* Allow call operands. */
|
/* Allow call operands. */
|
return GET_CODE (op) == SYMBOL_REF
|
return GET_CODE (op) == SYMBOL_REF
|
&& GET_MODE (op) == Pmode
|
&& GET_MODE (op) == Pmode
|
&& SYMBOL_REF_FUNCTION_P (op);
|
&& SYMBOL_REF_FUNCTION_P (op);
|
}
|
}
|
|
|
/* HImode and HFmode are not offsettable. */
|
/* HImode and HFmode are not offsettable. */
|
if (GET_MODE (op) == HImode || GET_CODE (op) == HFmode)
|
if (GET_MODE (op) == HImode || GET_CODE (op) == HFmode)
|
return 0;
|
return 0;
|
|
|
if ((GET_CODE (XEXP (op, 0)) == REG)
|
if ((GET_CODE (XEXP (op, 0)) == REG)
|
&& (REGNO (XEXP (op, 0)) == DP_REGNO))
|
&& (REGNO (XEXP (op, 0)) == DP_REGNO))
|
return c4x_U_constraint (XEXP (op, 1));
|
return c4x_U_constraint (XEXP (op, 1));
|
|
|
return 0;
|
return 0;
|
}
|
}
|
|
|
|
|
/* Symbolic operand. */
|
/* Symbolic operand. */
|
|
|
int
|
int
|
c4x_U_constraint (rtx op)
|
c4x_U_constraint (rtx op)
|
{
|
{
|
/* Don't allow direct addressing to an arbitrary constant. */
|
/* Don't allow direct addressing to an arbitrary constant. */
|
return GET_CODE (op) == CONST
|
return GET_CODE (op) == CONST
|
|| GET_CODE (op) == SYMBOL_REF
|
|| GET_CODE (op) == SYMBOL_REF
|
|| GET_CODE (op) == LABEL_REF;
|
|| GET_CODE (op) == LABEL_REF;
|
}
|
}
|
|
|
|
|
int
|
int
|
c4x_autoinc_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
|
c4x_autoinc_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
|
{
|
{
|
if (GET_CODE (op) == MEM)
|
if (GET_CODE (op) == MEM)
|
{
|
{
|
enum rtx_code code = GET_CODE (XEXP (op, 0));
|
enum rtx_code code = GET_CODE (XEXP (op, 0));
|
|
|
if (code == PRE_INC
|
if (code == PRE_INC
|
|| code == PRE_DEC
|
|| code == PRE_DEC
|
|| code == POST_INC
|
|| code == POST_INC
|
|| code == POST_DEC
|
|| code == POST_DEC
|
|| code == PRE_MODIFY
|
|| code == PRE_MODIFY
|
|| code == POST_MODIFY
|
|| code == POST_MODIFY
|
)
|
)
|
return 1;
|
return 1;
|
}
|
}
|
return 0;
|
return 0;
|
}
|
}
|
|
|
|
|
/* Return nonzero for a SUBREG that views an integer register in a
   float mode, which requires special handling.  */

int
mixed_subreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* Allow (subreg:HF (reg:HI)) that be generated for a union of an
     int and a long double.  */
  /* NOTE(review): the comment above says HF, but the code tests
     QFmode -- presumably (subreg:QF (reg:QI/HI)) is what is actually
     matched; confirm against the md patterns before relying on it.  */
  if (GET_CODE (op) == SUBREG
      && (GET_MODE (op) == QFmode)
      && (GET_MODE (SUBREG_REG (op)) == QImode
	  || GET_MODE (SUBREG_REG (op)) == HImode))
    return 1;
  return 0;
}
|
|
|
|
|
int
|
int
|
reg_imm_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
|
reg_imm_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
|
{
|
{
|
if (REG_P (op) || CONSTANT_P (op))
|
if (REG_P (op) || CONSTANT_P (op))
|
return 1;
|
return 1;
|
return 0;
|
return 0;
|
}
|
}
|
|
|
|
|
/* Return nonzero if OP is an operand whose address contains no
   register-modifying side effects: a register, a constant, or a MEM
   with a simple (reg, reg+reg, reg+const, DP-relative, or symbolic)
   address.  */

int
not_modify_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (REG_P (op) || CONSTANT_P (op))
    return 1;
  if (GET_CODE (op) != MEM)
    return 0;
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case REG:
      return 1;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (! REG_P (op0))
	  return 0;

	if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
	  return 1;
      }
      /* NOTE(review): no break here, so a PLUS whose second operand is
	 neither a REG nor a CONST_INT falls into the LO_SUM case and
	 re-tests XEXP (op, 0) against DP_REGNO.  This looks like a
	 missing break; confirm whether the fallthrough is intended
	 before changing it.  */

    case LO_SUM:
      {
	rtx op0 = XEXP (op, 0);

	if (REG_P (op0) && REGNO (op0) == DP_REGNO)
	  return 1;
      }
      break;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;

    default:
      break;
    }
  return 0;
}
|
|
|
|
|
int
|
int
|
not_rc_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
|
not_rc_reg (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
|
{
|
{
|
if (REG_P (op) && REGNO (op) == RC_REGNO)
|
if (REG_P (op) && REGNO (op) == RC_REGNO)
|
return 0;
|
return 0;
|
return 1;
|
return 1;
|
}
|
}
|
|
|
|
|
/* Decompose the address of MEM rtx OP for conflict analysis.
   On return *BASE and *INDEX hold the base and index register numbers
   (0 when absent), *DISP the constant displacement, and *INCDEC is
   nonzero when the address has a pre/post inc/dec/modify side effect.
   Aborts via fatal_insn on an address that is not a valid `S' form.  */

static void
c4x_S_address_parse (rtx op, int *base, int *incdec, int *index, int *disp)
{
  *base = 0;
  *incdec = 0;
  *index = 0;
  *disp = 0;

  if (GET_CODE (op) != MEM)
    fatal_insn ("invalid indirect memory address", op);

  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case PRE_DEC:
      /* *--ARx: the decremented address is used.  */
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = -1;
      return;

    case POST_DEC:
      /* *ARx--: the original address is used.  */
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 0;
      return;

    case PRE_INC:
      /* *++ARx: the incremented address is used.  */
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 1;
      return;

    case POST_INC:
      /* *ARx++: the original address is used.  */
      *base = REGNO (XEXP (op, 0));
      *incdec = 1;
      *disp = 0;
      return;

    case POST_MODIFY:
      *base = REGNO (XEXP (op, 0));
      if (REG_P (XEXP (XEXP (op, 1), 1)))
	{
	  /* Register modifier: record it as an index.  */
	  *index = REGNO (XEXP (XEXP (op, 1), 1));
	  *disp = 0; /* ??? */
	}
      else
	*disp = INTVAL (XEXP (XEXP (op, 1), 1));
      *incdec = 1;
      return;

    case PRE_MODIFY:
      *base = REGNO (XEXP (op, 0));
      if (REG_P (XEXP (XEXP (op, 1), 1)))
	{
	  /* Register modifier: record it as an index.  */
	  *index = REGNO (XEXP (XEXP (op, 1), 1));
	  *disp = 1; /* ??? */
	}
      else
	*disp = INTVAL (XEXP (XEXP (op, 1), 1));
      *incdec = 1;

      return;

    case REG:
      /* Plain register indirect.  */
      *base = REGNO (op);
      return;

    case PLUS:
      {
	rtx op0 = XEXP (op, 0);
	rtx op1 = XEXP (op, 1);

	if (c4x_a_register (op0))
	  {
	    if (c4x_x_register (op1))
	      {
		/* Base + index register.  */
		*base = REGNO (op0);
		*index = REGNO (op1);
		return;
	      }
	    else if ((GET_CODE (op1) == CONST_INT
		      && IS_DISP1_CONST (INTVAL (op1))))
	      {
		/* Base + small constant displacement.  */
		*base = REGNO (op0);
		*disp = INTVAL (op1);
		return;
	      }
	  }
	else if (c4x_x_register (op0) && c4x_a_register (op1))
	  {
	    /* Commuted index + base form.  */
	    *base = REGNO (op1);
	    *index = REGNO (op0);
	    return;
	  }
      }
      /* Fall through.  */

    default:
      fatal_insn ("invalid indirect (S) memory address", op);
    }
}
|
|
|
|
|
int
|
int
|
c4x_address_conflict (rtx op0, rtx op1, int store0, int store1)
|
c4x_address_conflict (rtx op0, rtx op1, int store0, int store1)
|
{
|
{
|
int base0;
|
int base0;
|
int base1;
|
int base1;
|
int incdec0;
|
int incdec0;
|
int incdec1;
|
int incdec1;
|
int index0;
|
int index0;
|
int index1;
|
int index1;
|
int disp0;
|
int disp0;
|
int disp1;
|
int disp1;
|
|
|
if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
|
if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
|
return 1;
|
return 1;
|
|
|
c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
|
c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
|
c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
|
c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
|
|
|
if (store0 && store1)
|
if (store0 && store1)
|
{
|
{
|
/* If we have two stores in parallel to the same address, then
|
/* If we have two stores in parallel to the same address, then
|
the C4x only executes one of the stores. This is unlikely to
|
the C4x only executes one of the stores. This is unlikely to
|
cause problems except when writing to a hardware device such
|
cause problems except when writing to a hardware device such
|
as a FIFO since the second write will be lost. The user
|
as a FIFO since the second write will be lost. The user
|
should flag the hardware location as being volatile so that
|
should flag the hardware location as being volatile so that
|
we don't do this optimization. While it is unlikely that we
|
we don't do this optimization. While it is unlikely that we
|
have an aliased address if both locations are not marked
|
have an aliased address if both locations are not marked
|
volatile, it is probably safer to flag a potential conflict
|
volatile, it is probably safer to flag a potential conflict
|
if either location is volatile. */
|
if either location is volatile. */
|
if (! flag_argument_noalias)
|
if (! flag_argument_noalias)
|
{
|
{
|
if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
|
if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
|
return 1;
|
return 1;
|
}
|
}
|
}
|
}
|
|
|
/* If have a parallel load and a store to the same address, the load
|
/* If have a parallel load and a store to the same address, the load
|
is performed first, so there is no conflict. Similarly, there is
|
is performed first, so there is no conflict. Similarly, there is
|
no conflict if have parallel loads from the same address. */
|
no conflict if have parallel loads from the same address. */
|
|
|
/* Cannot use auto increment or auto decrement twice for same
|
/* Cannot use auto increment or auto decrement twice for same
|
base register. */
|
base register. */
|
if (base0 == base1 && incdec0 && incdec0)
|
if (base0 == base1 && incdec0 && incdec0)
|
return 1;
|
return 1;
|
|
|
/* It might be too confusing for GCC if we have use a base register
|
/* It might be too confusing for GCC if we have use a base register
|
with a side effect and a memory reference using the same register
|
with a side effect and a memory reference using the same register
|
in parallel. */
|
in parallel. */
|
if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
|
if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
|
return 1;
|
return 1;
|
|
|
/* We cannot optimize the case where op1 and op2 refer to the same
|
/* We cannot optimize the case where op1 and op2 refer to the same
|
address. */
|
address. */
|
if (base0 == base1 && disp0 == disp1 && index0 == index1)
|
if (base0 == base1 && disp0 == disp1 && index0 == index1)
|
return 1;
|
return 1;
|
|
|
/* No conflict. */
|
/* No conflict. */
|
return 0;
|
return 0;
|
}
|
}
|
|
|
|
|
/* Check for while loop inside a decrement and branch loop. */
|
/* Check for while loop inside a decrement and branch loop. */
|
|
|
int
|
int
|
c4x_label_conflict (rtx insn, rtx jump, rtx db)
|
c4x_label_conflict (rtx insn, rtx jump, rtx db)
|
{
|
{
|
while (insn)
|
while (insn)
|
{
|
{
|
if (GET_CODE (insn) == CODE_LABEL)
|
if (GET_CODE (insn) == CODE_LABEL)
|
{
|
{
|
if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
|
if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
|
return 1;
|
return 1;
|
if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
|
if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
|
return 0;
|
return 0;
|
}
|
}
|
insn = PREV_INSN (insn);
|
insn = PREV_INSN (insn);
|
}
|
}
|
return 1;
|
return 1;
|
}
|
}
|
|
|
|
|
/* Validate combination of operands for parallel load/store instructions. */
|
/* Validate combination of operands for parallel load/store instructions. */
|
|
|
int
valid_parallel_load_store (rtx *operands,
			   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];
  rtx op2 = operands[2];
  rtx op3 = operands[3];

  /* Strip SUBREGs so register numbers can be compared directly.  */
  if (GET_CODE (op0) == SUBREG)
    op0 = SUBREG_REG (op0);
  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);
  if (GET_CODE (op3) == SUBREG)
    op3 = SUBREG_REG (op3);

  /* The patterns should only allow ext_low_reg_operand() or
     par_ind_operand() operands.  Thus of the 4 operands, only 2
     should be REGs and the other 2 should be MEMs.  */

  /* This test prevents the multipack pass from using this pattern if
     op0 is used as an index or base register in op2 or op3, since
     this combination will require reloading.  */
  if (GET_CODE (op0) == REG
      && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
	  || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
    return 0;

  /* LDI||LDI: two loads need distinct destinations and
     conflict-free source addresses.  */
  if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
    return (REGNO (op0) != REGNO (op2))
      && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
      && ! c4x_address_conflict (op1, op3, 0, 0);

  /* STI||STI: two stores to conflict-free addresses.  */
  if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
    return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
      && ! c4x_address_conflict (op0, op2, 1, 1);

  /* LDI||STI: a load paired with a store.  */
  if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
    return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
      && ! c4x_address_conflict (op1, op2, 0, 1);

  /* STI||LDI: a store paired with a load.  */
  if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
    return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
      && ! c4x_address_conflict (op0, op3, 1, 0);

  return 0;
}
|
}
|
|
|
|
|
int
|
int
|
valid_parallel_operands_4 (rtx *operands,
|
valid_parallel_operands_4 (rtx *operands,
|
enum machine_mode mode ATTRIBUTE_UNUSED)
|
enum machine_mode mode ATTRIBUTE_UNUSED)
|
{
|
{
|
rtx op0 = operands[0];
|
rtx op0 = operands[0];
|
rtx op2 = operands[2];
|
rtx op2 = operands[2];
|
|
|
if (GET_CODE (op0) == SUBREG)
|
if (GET_CODE (op0) == SUBREG)
|
op0 = SUBREG_REG (op0);
|
op0 = SUBREG_REG (op0);
|
if (GET_CODE (op2) == SUBREG)
|
if (GET_CODE (op2) == SUBREG)
|
op2 = SUBREG_REG (op2);
|
op2 = SUBREG_REG (op2);
|
|
|
/* This test prevents the multipack pass from using this pattern if
|
/* This test prevents the multipack pass from using this pattern if
|
op0 is used as an index or base register in op2, since this combination
|
op0 is used as an index or base register in op2, since this combination
|
will require reloading. */
|
will require reloading. */
|
if (GET_CODE (op0) == REG
|
if (GET_CODE (op0) == REG
|
&& GET_CODE (op2) == MEM
|
&& GET_CODE (op2) == MEM
|
&& reg_mentioned_p (op0, XEXP (op2, 0)))
|
&& reg_mentioned_p (op0, XEXP (op2, 0)))
|
return 0;
|
return 0;
|
|
|
return 1;
|
return 1;
|
}
|
}
|
|
|
|
|
int
|
int
|
valid_parallel_operands_5 (rtx *operands,
|
valid_parallel_operands_5 (rtx *operands,
|
enum machine_mode mode ATTRIBUTE_UNUSED)
|
enum machine_mode mode ATTRIBUTE_UNUSED)
|
{
|
{
|
int regs = 0;
|
int regs = 0;
|
rtx op0 = operands[0];
|
rtx op0 = operands[0];
|
rtx op1 = operands[1];
|
rtx op1 = operands[1];
|
rtx op2 = operands[2];
|
rtx op2 = operands[2];
|
rtx op3 = operands[3];
|
rtx op3 = operands[3];
|
|
|
if (GET_CODE (op0) == SUBREG)
|
if (GET_CODE (op0) == SUBREG)
|
op0 = SUBREG_REG (op0);
|
op0 = SUBREG_REG (op0);
|
if (GET_CODE (op1) == SUBREG)
|
if (GET_CODE (op1) == SUBREG)
|
op1 = SUBREG_REG (op1);
|
op1 = SUBREG_REG (op1);
|
if (GET_CODE (op2) == SUBREG)
|
if (GET_CODE (op2) == SUBREG)
|
op2 = SUBREG_REG (op2);
|
op2 = SUBREG_REG (op2);
|
|
|
/* The patterns should only allow ext_low_reg_operand() or
|
/* The patterns should only allow ext_low_reg_operand() or
|
par_ind_operand() operands. Operands 1 and 2 may be commutative
|
par_ind_operand() operands. Operands 1 and 2 may be commutative
|
but only one of them can be a register. */
|
but only one of them can be a register. */
|
if (GET_CODE (op1) == REG)
|
if (GET_CODE (op1) == REG)
|
regs++;
|
regs++;
|
if (GET_CODE (op2) == REG)
|
if (GET_CODE (op2) == REG)
|
regs++;
|
regs++;
|
|
|
if (regs != 1)
|
if (regs != 1)
|
return 0;
|
return 0;
|
|
|
/* This test prevents the multipack pass from using this pattern if
|
/* This test prevents the multipack pass from using this pattern if
|
op0 is used as an index or base register in op3, since this combination
|
op0 is used as an index or base register in op3, since this combination
|
will require reloading. */
|
will require reloading. */
|
if (GET_CODE (op0) == REG
|
if (GET_CODE (op0) == REG
|
&& GET_CODE (op3) == MEM
|
&& GET_CODE (op3) == MEM
|
&& reg_mentioned_p (op0, XEXP (op3, 0)))
|
&& reg_mentioned_p (op0, XEXP (op3, 0)))
|
return 0;
|
return 0;
|
|
|
return 1;
|
return 1;
|
}
|
}
|
|
|
|
|
int
|
int
|
valid_parallel_operands_6 (rtx *operands,
|
valid_parallel_operands_6 (rtx *operands,
|
enum machine_mode mode ATTRIBUTE_UNUSED)
|
enum machine_mode mode ATTRIBUTE_UNUSED)
|
{
|
{
|
int regs = 0;
|
int regs = 0;
|
rtx op0 = operands[0];
|
rtx op0 = operands[0];
|
rtx op1 = operands[1];
|
rtx op1 = operands[1];
|
rtx op2 = operands[2];
|
rtx op2 = operands[2];
|
rtx op4 = operands[4];
|
rtx op4 = operands[4];
|
rtx op5 = operands[5];
|
rtx op5 = operands[5];
|
|
|
if (GET_CODE (op1) == SUBREG)
|
if (GET_CODE (op1) == SUBREG)
|
op1 = SUBREG_REG (op1);
|
op1 = SUBREG_REG (op1);
|
if (GET_CODE (op2) == SUBREG)
|
if (GET_CODE (op2) == SUBREG)
|
op2 = SUBREG_REG (op2);
|
op2 = SUBREG_REG (op2);
|
if (GET_CODE (op4) == SUBREG)
|
if (GET_CODE (op4) == SUBREG)
|
op4 = SUBREG_REG (op4);
|
op4 = SUBREG_REG (op4);
|
if (GET_CODE (op5) == SUBREG)
|
if (GET_CODE (op5) == SUBREG)
|
op5 = SUBREG_REG (op5);
|
op5 = SUBREG_REG (op5);
|
|
|
/* The patterns should only allow ext_low_reg_operand() or
|
/* The patterns should only allow ext_low_reg_operand() or
|
par_ind_operand() operands. Thus of the 4 input operands, only 2
|
par_ind_operand() operands. Thus of the 4 input operands, only 2
|
should be REGs and the other 2 should be MEMs. */
|
should be REGs and the other 2 should be MEMs. */
|
|
|
if (GET_CODE (op1) == REG)
|
if (GET_CODE (op1) == REG)
|
regs++;
|
regs++;
|
if (GET_CODE (op2) == REG)
|
if (GET_CODE (op2) == REG)
|
regs++;
|
regs++;
|
if (GET_CODE (op4) == REG)
|
if (GET_CODE (op4) == REG)
|
regs++;
|
regs++;
|
if (GET_CODE (op5) == REG)
|
if (GET_CODE (op5) == REG)
|
regs++;
|
regs++;
|
|
|
/* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
|
/* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
|
Perhaps we should count the MEMs as well? */
|
Perhaps we should count the MEMs as well? */
|
if (regs != 2)
|
if (regs != 2)
|
return 0;
|
return 0;
|
|
|
/* This test prevents the multipack pass from using this pattern if
|
/* This test prevents the multipack pass from using this pattern if
|
op0 is used as an index or base register in op4 or op5, since
|
op0 is used as an index or base register in op4 or op5, since
|
this combination will require reloading. */
|
this combination will require reloading. */
|
if (GET_CODE (op0) == REG
|
if (GET_CODE (op0) == REG
|
&& ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
|
&& ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
|
|| (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
|
|| (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
|
return 0;
|
return 0;
|
|
|
return 1;
|
return 1;
|
}
|
}
|
|
|
|
|
/* Validate combination of src operands.  Note that the operands have
   been screened by the src_operand predicate.  We just have to check
   that the combination of operands is valid.  If FORCE is set, ensure
   that the destination regno is valid if we have a 2 operand insn.  */

static int
c4x_valid_operands (enum rtx_code code, rtx *operands,
                    enum machine_mode mode ATTRIBUTE_UNUSED,
                    int force)
{
  rtx op0;   /* Destination operand (operands[0]).  */
  rtx op1;   /* First source operand.  */
  rtx op2;   /* Second source operand.  */
  enum rtx_code code1;
  enum rtx_code code2;


  /* FIXME, why can't we tighten the operands for IF_THEN_ELSE?  */
  if (code == IF_THEN_ELSE)
    /* NOTE(review): the `1 ||` makes the right-hand comparison dead
       code, so IF_THEN_ELSE is always accepted; this appears to be a
       deliberate relaxation per the FIXME above.  */
    return 1 || (operands[0] == operands[2] || operands[0] == operands[3]);

  /* COMPARE has only two operands, so the sources are operands[0..1];
     every other code has a destination in operands[0] and sources in
     operands[1..2].  */
  if (code == COMPARE)
    {
      op1 = operands[0];
      op2 = operands[1];
    }
  else
    {
      op1 = operands[1];
      op2 = operands[2];
    }

  op0 = operands[0];

  /* Look through SUBREGs so the checks below see the underlying
     register or memory reference.  */
  if (GET_CODE (op0) == SUBREG)
    op0 = SUBREG_REG (op0);
  if (GET_CODE (op1) == SUBREG)
    op1 = SUBREG_REG (op1);
  if (GET_CODE (op2) == SUBREG)
    op2 = SUBREG_REG (op2);

  code1 = GET_CODE (op1);
  code2 = GET_CODE (op2);


  /* Two register sources are always OK.  */
  if (code1 == REG && code2 == REG)
    return 1;

  /* Two memory sources require both to use the restricted indirect
     addressing modes (S or R class).  */
  if (code1 == MEM && code2 == MEM)
    {
      if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
        return 1;
      return c4x_R_indirect (op1) && c4x_R_indirect (op2);
    }

  /* We cannot handle two MEMs or two CONSTS, etc.  */
  if (code1 == code2)
    return 0;

  if (code1 == REG)
    {
      switch (code2)
        {
        case CONST_INT:
          /* Commutative case: a J constant paired with a restricted
             indirect reference is acceptable.  */
          if (c4x_J_constant (op2) && c4x_R_indirect (op1))
            return 1;
          break;

        case CONST_DOUBLE:
          if (! c4x_H_constant (op2))
            return 0;
          break;

          /* Any valid memory operand screened by src_operand is OK.  */
        case MEM:
          break;

        default:
          fatal_insn ("c4x_valid_operands: Internal error", op2);
          break;
        }

      if (GET_CODE (op0) == SCRATCH)
        return 1;

      if (!REG_P (op0))
        return 0;

      /* Check that we have a valid destination register for a two operand
         instruction.  */
      return ! force || code == COMPARE || REGNO (op1) == REGNO (op0);
    }


  /* Check non-commutative operators.  */
  if (code == ASHIFTRT || code == LSHIFTRT
      || code == ASHIFT || code == COMPARE)
    return code2 == REG
      && (c4x_S_indirect (op1) || c4x_R_indirect (op1));


  /* Assume MINUS is commutative since the subtract patterns
     also support the reverse subtract instructions.  Since op1
     is not a register, and op2 is a register, op1 can only
     be a restricted memory operand for a shift instruction.  */
  if (code2 == REG)
    {
      switch (code1)
        {
        case CONST_INT:
          break;

        case CONST_DOUBLE:
          if (! c4x_H_constant (op1))
            return 0;
          break;

          /* Any valid memory operand screened by src_operand is OK.  */
        case MEM:
          break;

        default:
          abort ();
          break;
        }

      if (GET_CODE (op0) == SCRATCH)
        return 1;

      if (!REG_P (op0))
        return 0;

      /* Check that we have a valid destination register for a two operand
         instruction.  */
      return ! force || REGNO (op1) == REGNO (op0);
    }

  if (c4x_J_constant (op1) && c4x_R_indirect (op2))
    return 1;

  return 0;
}
|
|
|
|
|
/* Wrapper around c4x_valid_operands used by insn conditions.  */

int
valid_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
{
  /* If we are not optimizing then we have to let anything go and let
     reload fix things up.  instantiate_decl in function.c can produce
     invalid insns by changing the offset of a memory operand from a
     valid one into an invalid one, when the second operand is also a
     memory operand.  The alternative is not to allow two memory
     operands for an insn when not optimizing.  The problem only rarely
     occurs, for example with the C-torture program DFcmp.c.  */
  if (! optimize)
    return 1;

  return c4x_valid_operands (code, operands, mode, 0);
}
|
|
|
|
|
/* Massage OPERANDS for a CODE insn in MODE so that they form a valid
   operand combination, forcing values into registers where required.
   May rewrite entries of OPERANDS in place and emit move insns.
   Always returns 1.  */

int
legitimize_operands (enum rtx_code code, rtx *operands, enum machine_mode mode)
{
  /* Compare only has 2 operands.  */
  if (code == COMPARE)
    {
      /* During RTL generation, force constants into pseudos so that
         they can get hoisted out of loops.  This will tie up an extra
         register but can save an extra cycle.  Only do this if loop
         optimization enabled.  (We cannot pull this trick for add and
         sub instructions since the flow pass won't find
         autoincrements etc.)  This allows us to generate compare
         instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
         of LDI *AR0++, R0; CMPI 42, R0.

         Note that expand_binops will try to load an expensive constant
         into a register if it is used within a loop.  Unfortunately,
         the cost mechanism doesn't allow us to look at the other
         operand to decide whether the constant is expensive.  */

      if (! reload_in_progress
          && TARGET_HOIST
          && optimize > 0
          && GET_CODE (operands[1]) == CONST_INT
          && rtx_cost (operands[1], code) > 1)
        operands[1] = force_reg (mode, operands[1]);

      /* If the combination is still invalid, force the first compare
         operand into a register.  */
      if (! reload_in_progress
          && ! c4x_valid_operands (code, operands, mode, 0))
        operands[0] = force_reg (mode, operands[0]);
      return 1;
    }

  /* We cannot do this for ADDI/SUBI insns since we will
     defeat the flow pass from finding autoincrement addressing
     opportunities.  */
  if (! reload_in_progress
      && ! ((code == PLUS || code == MINUS) && mode == Pmode)
      && TARGET_HOIST
      && optimize > 1
      && GET_CODE (operands[2]) == CONST_INT
      && rtx_cost (operands[2], code) > 1)
    operands[2] = force_reg (mode, operands[2]);

  /* We can get better code on a C30 if we force constant shift counts
     into a register.  This way they can get hoisted out of loops,
     tying up a register but saving an instruction.  The downside is
     that they may get allocated to an address or index register, and
     thus we will get a pipeline conflict if there is a nearby
     indirect address using an address register.

     Note that expand_binops will not try to load an expensive constant
     into a register if it is used within a loop for a shift insn.  */

  if (! reload_in_progress
      && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
    {
      /* If the operand combination is invalid, we force operand1 into a
         register, preventing reload from having doing to do this at a
         later stage.  */
      operands[1] = force_reg (mode, operands[1]);
      if (TARGET_FORCE)
        {
          /* Copy the forced source into the destination and then
             operate on the destination (a 2-operand style insn).  */
          emit_move_insn (operands[0], operands[1]);
          operands[1] = copy_rtx (operands[0]);
        }
      else
        {
          /* Just in case...  */
          if (! c4x_valid_operands (code, operands, mode, 0))
            operands[2] = force_reg (mode, operands[2]);
        }
    }

  /* Right shifts require a negative shift count, but GCC expects
     a positive count, so we emit a NEG.  */
  if ((code == ASHIFTRT || code == LSHIFTRT)
      && (GET_CODE (operands[2]) != CONST_INT))
    operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));


  /* When the shift count is greater than 32 then the result
     can be implementation dependent.  We truncate the result to
     fit in 5 bits so that we do not emit invalid code when
     optimizing---such as trying to generate lhu2 with 20021124-1.c.  */
  if (((code == ASHIFTRT || code == LSHIFTRT || code == ASHIFT)
       && (GET_CODE (operands[2]) == CONST_INT))
      && INTVAL (operands[2]) > (GET_MODE_BITSIZE (mode) - 1))
    operands[2]
      = GEN_INT (INTVAL (operands[2]) & (GET_MODE_BITSIZE (mode) - 1));

  return 1;
}
|
|
|
|
|
/* The following predicates are used for instruction scheduling. */
|
/* The following predicates are used for instruction scheduling. */
|
|
|
int
|
int
|
group1_reg_operand (rtx op, enum machine_mode mode)
|
group1_reg_operand (rtx op, enum machine_mode mode)
|
{
|
{
|
if (mode != VOIDmode && mode != GET_MODE (op))
|
if (mode != VOIDmode && mode != GET_MODE (op))
|
return 0;
|
return 0;
|
if (GET_CODE (op) == SUBREG)
|
if (GET_CODE (op) == SUBREG)
|
op = SUBREG_REG (op);
|
op = SUBREG_REG (op);
|
return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
|
return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
|
}
|
}
|
|
|
|
|
int
|
int
|
group1_mem_operand (rtx op, enum machine_mode mode)
|
group1_mem_operand (rtx op, enum machine_mode mode)
|
{
|
{
|
if (mode != VOIDmode && mode != GET_MODE (op))
|
if (mode != VOIDmode && mode != GET_MODE (op))
|
return 0;
|
return 0;
|
|
|
if (GET_CODE (op) == MEM)
|
if (GET_CODE (op) == MEM)
|
{
|
{
|
op = XEXP (op, 0);
|
op = XEXP (op, 0);
|
if (GET_CODE (op) == PLUS)
|
if (GET_CODE (op) == PLUS)
|
{
|
{
|
rtx op0 = XEXP (op, 0);
|
rtx op0 = XEXP (op, 0);
|
rtx op1 = XEXP (op, 1);
|
rtx op1 = XEXP (op, 1);
|
|
|
if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
|
if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
|
|| (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
|
|| (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
|
return 1;
|
return 1;
|
}
|
}
|
else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
|
else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
|
return 1;
|
return 1;
|
}
|
}
|
|
|
return 0;
|
return 0;
|
}
|
}
|
|
|
|
|
/* Return true if any one of the address registers. */
|
/* Return true if any one of the address registers. */
|
|
|
int
|
int
|
arx_reg_operand (rtx op, enum machine_mode mode)
|
arx_reg_operand (rtx op, enum machine_mode mode)
|
{
|
{
|
if (mode != VOIDmode && mode != GET_MODE (op))
|
if (mode != VOIDmode && mode != GET_MODE (op))
|
return 0;
|
return 0;
|
if (GET_CODE (op) == SUBREG)
|
if (GET_CODE (op) == SUBREG)
|
op = SUBREG_REG (op);
|
op = SUBREG_REG (op);
|
return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
|
return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
|
}
|
}
|
|
|
|
|
static int
|
static int
|
c4x_arn_reg_operand (rtx op, enum machine_mode mode, unsigned int regno)
|
c4x_arn_reg_operand (rtx op, enum machine_mode mode, unsigned int regno)
|
{
|
{
|
if (mode != VOIDmode && mode != GET_MODE (op))
|
if (mode != VOIDmode && mode != GET_MODE (op))
|
return 0;
|
return 0;
|
if (GET_CODE (op) == SUBREG)
|
if (GET_CODE (op) == SUBREG)
|
op = SUBREG_REG (op);
|
op = SUBREG_REG (op);
|
return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
|
return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
|
}
|
}
|
|
|
|
|
/* Helper: OP is a memory reference whose address involves register
   REGNO -- directly, via autoincrement/decrement, via PLUS, or via
   {PRE,POST}_MODIFY.  Before reload completes, any register matches.  */

static int
c4x_arn_mem_operand (rtx op, enum machine_mode mode, unsigned int regno)
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == MEM)
    {
      op = XEXP (op, 0);
      switch (GET_CODE (op))
        {
        case PRE_DEC:
        case POST_DEC:
        case PRE_INC:
        case POST_INC:
          op = XEXP (op, 0);
          /* Fall through: test the autoincremented register.  */

        case REG:
          return REG_P (op) && (! reload_completed || (REGNO (op) == regno));

        case PRE_MODIFY:
        case POST_MODIFY:
          /* Check the modified register, then the index register of
             the modification expression (op 1 is a PLUS whose second
             operand may be a register).  */
          if (REG_P (XEXP (op, 0)) && (! reload_completed
                                       || (REGNO (XEXP (op, 0)) == regno)))
            return 1;
          if (REG_P (XEXP (XEXP (op, 1), 1))
              && (! reload_completed
                  || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
            return 1;
          break;

        case PLUS:
          {
            rtx op0 = XEXP (op, 0);
            rtx op1 = XEXP (op, 1);

            /* Either the base or the index may be the register.  */
            if ((REG_P (op0) && (! reload_completed
                                 || (REGNO (op0) == regno)))
                || (REG_P (op1) && (! reload_completed
                                    || (REGNO (op1) == regno))))
              return 1;
          }
          break;

        default:
          break;
        }
    }
  return 0;
}
|
|
|
|
|
/* Predicate: OP is register AR0 (any register before reload).  */

int
ar0_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR0_REGNO);
}
|
|
|
|
|
/* Predicate: OP is a memory reference whose address uses AR0.  */

int
ar0_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR0_REGNO);
}
|
|
|
|
|
/* Predicate: OP is register AR1 (any register before reload).  */

int
ar1_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR1_REGNO);
}
|
|
|
|
|
/* Predicate: OP is a memory reference whose address uses AR1.  */

int
ar1_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR1_REGNO);
}
|
|
|
|
|
/* Predicate: OP is register AR2 (any register before reload).  */

int
ar2_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR2_REGNO);
}
|
|
|
|
|
/* Predicate: OP is a memory reference whose address uses AR2.  */

int
ar2_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR2_REGNO);
}
|
|
|
|
|
/* Predicate: OP is register AR3 (any register before reload).  */

int
ar3_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR3_REGNO);
}
|
|
|
|
|
/* Predicate: OP is a memory reference whose address uses AR3.  */

int
ar3_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR3_REGNO);
}
|
|
|
|
|
/* Predicate: OP is register AR4 (any register before reload).  */

int
ar4_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR4_REGNO);
}
|
|
|
|
|
/* Predicate: OP is a memory reference whose address uses AR4.  */

int
ar4_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR4_REGNO);
}
|
|
|
|
|
/* Predicate: OP is register AR5 (any register before reload).  */

int
ar5_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR5_REGNO);
}
|
|
|
|
|
/* Predicate: OP is a memory reference whose address uses AR5.  */

int
ar5_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR5_REGNO);
}
|
|
|
|
|
/* Predicate: OP is register AR6 (any register before reload).  */

int
ar6_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR6_REGNO);
}
|
|
|
|
|
/* Predicate: OP is a memory reference whose address uses AR6.  */

int
ar6_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR6_REGNO);
}
|
|
|
|
|
/* Predicate: OP is register AR7 (any register before reload).  */

int
ar7_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, AR7_REGNO);
}
|
|
|
|
|
/* Predicate: OP is a memory reference whose address uses AR7.  */

int
ar7_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, AR7_REGNO);
}
|
|
|
|
|
/* Predicate: OP is index register IR0 (any register before reload).  */

int
ir0_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, IR0_REGNO);
}
|
|
|
|
|
/* Predicate: OP is a memory reference whose address uses IR0.  */

int
ir0_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, IR0_REGNO);
}
|
|
|
|
|
/* Predicate: OP is index register IR1 (any register before reload).  */

int
ir1_reg_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_reg_operand (op, mode, IR1_REGNO);
}
|
|
|
|
|
/* Predicate: OP is a memory reference whose address uses IR1.  */

int
ir1_mem_operand (rtx op, enum machine_mode mode)
{
  return c4x_arn_mem_operand (op, mode, IR1_REGNO);
}
|
|
|
|
|
/* This is similar to operand_subword but allows autoincrement
   addressing.  Returns word I of OP (an HImode or HFmode operand),
   or aborts via fatal_insn for addresses that cannot be split.  */

rtx
c4x_operand_subword (rtx op, int i, int validate_address,
                     enum machine_mode mode)
{
  if (mode != HImode && mode != HFmode)
    fatal_insn ("c4x_operand_subword: invalid mode", op);

  if (mode == HFmode && REG_P (op))
    fatal_insn ("c4x_operand_subword: invalid operand", op);

  if (GET_CODE (op) == MEM)
    {
      enum rtx_code code = GET_CODE (XEXP (op, 0));
      /* NOTE(review): this inner `mode` shadows the parameter; the
         final operand_subword call below uses the parameter again
         once this scope ends -- confirm intended.  */
      enum machine_mode mode = GET_MODE (XEXP (op, 0));
      enum machine_mode submode;

      /* Narrow the two-word mode to the corresponding one-word mode.  */
      submode = mode;
      if (mode == HImode)
        submode = QImode;
      else if (mode == HFmode)
        submode = QFmode;

      switch (code)
        {
        case POST_INC:
        case PRE_INC:
          return gen_rtx_MEM (submode, XEXP (op, 0));

        case POST_DEC:
        case PRE_DEC:
        case PRE_MODIFY:
        case POST_MODIFY:
          /* We could handle these with some difficulty.
             e.g., *p-- => *(p-=2); *(p+1).  */
          fatal_insn ("c4x_operand_subword: invalid autoincrement", op);

        case SYMBOL_REF:
        case LABEL_REF:
        case CONST:
        case CONST_INT:
          fatal_insn ("c4x_operand_subword: invalid address", op);

          /* Even though offsettable_address_p considers (MEM
             (LO_SUM)) to be offsettable, it is not safe if the
             address is at the end of the data page since we also have
             to fix up the associated high PART.  In this case where
             we are trying to split a HImode or HFmode memory
             reference, we would have to emit another insn to reload a
             new HIGH value.  It's easier to disable LO_SUM memory references
             in HImode or HFmode and we probably get better code.  */
        case LO_SUM:
          fatal_insn ("c4x_operand_subword: address not offsettable", op);

        default:
          break;
        }
    }

  /* Fall back to the generic word extraction.  */
  return operand_subword (op, i, validate_address, mode);
}
|
|
|
/* Singly-linked list node used to record symbol names for the
   global/external directive bookkeeping below.  The name string is
   not copied; callers must pass storage that outlives the list.  */

struct name_list
{
  struct name_list *next;
  const char *name;
};

/* Symbols declared global in this translation unit.  */
static struct name_list *global_head;

/* Symbols referenced but not defined here; emitted as .ref at file end.  */
static struct name_list *extern_head;
|
|
|
|
|
/* Add NAME to list of global symbols and remove from external list if
|
/* Add NAME to list of global symbols and remove from external list if
|
present on external list. */
|
present on external list. */
|
|
|
void
|
void
|
c4x_global_label (const char *name)
|
c4x_global_label (const char *name)
|
{
|
{
|
struct name_list *p, *last;
|
struct name_list *p, *last;
|
|
|
/* Do not insert duplicate names, so linearly search through list of
|
/* Do not insert duplicate names, so linearly search through list of
|
existing names. */
|
existing names. */
|
p = global_head;
|
p = global_head;
|
while (p)
|
while (p)
|
{
|
{
|
if (strcmp (p->name, name) == 0)
|
if (strcmp (p->name, name) == 0)
|
return;
|
return;
|
p = p->next;
|
p = p->next;
|
}
|
}
|
p = (struct name_list *) xmalloc (sizeof *p);
|
p = (struct name_list *) xmalloc (sizeof *p);
|
p->next = global_head;
|
p->next = global_head;
|
p->name = name;
|
p->name = name;
|
global_head = p;
|
global_head = p;
|
|
|
/* Remove this name from ref list if present. */
|
/* Remove this name from ref list if present. */
|
last = NULL;
|
last = NULL;
|
p = extern_head;
|
p = extern_head;
|
while (p)
|
while (p)
|
{
|
{
|
if (strcmp (p->name, name) == 0)
|
if (strcmp (p->name, name) == 0)
|
{
|
{
|
if (last)
|
if (last)
|
last->next = p->next;
|
last->next = p->next;
|
else
|
else
|
extern_head = p->next;
|
extern_head = p->next;
|
break;
|
break;
|
}
|
}
|
last = p;
|
last = p;
|
p = p->next;
|
p = p->next;
|
}
|
}
|
}
|
}
|
|
|
|
|
/* Add NAME to list of external symbols. */
|
/* Add NAME to list of external symbols. */
|
|
|
void
|
void
|
c4x_external_ref (const char *name)
|
c4x_external_ref (const char *name)
|
{
|
{
|
struct name_list *p;
|
struct name_list *p;
|
|
|
/* Do not insert duplicate names. */
|
/* Do not insert duplicate names. */
|
p = extern_head;
|
p = extern_head;
|
while (p)
|
while (p)
|
{
|
{
|
if (strcmp (p->name, name) == 0)
|
if (strcmp (p->name, name) == 0)
|
return;
|
return;
|
p = p->next;
|
p = p->next;
|
}
|
}
|
|
|
/* Do not insert ref if global found. */
|
/* Do not insert ref if global found. */
|
p = global_head;
|
p = global_head;
|
while (p)
|
while (p)
|
{
|
{
|
if (strcmp (p->name, name) == 0)
|
if (strcmp (p->name, name) == 0)
|
return;
|
return;
|
p = p->next;
|
p = p->next;
|
}
|
}
|
p = (struct name_list *) xmalloc (sizeof *p);
|
p = (struct name_list *) xmalloc (sizeof *p);
|
p->next = extern_head;
|
p->next = extern_head;
|
p->name = name;
|
p->name = name;
|
extern_head = p;
|
extern_head = p;
|
}
|
}
|
|
|
/* We need to have a data section we can identify so that we can set
|
/* We need to have a data section we can identify so that we can set
|
the DP register back to a data pointer in the small memory model.
|
the DP register back to a data pointer in the small memory model.
|
This is only required for ISRs if we are paranoid that someone
|
This is only required for ISRs if we are paranoid that someone
|
may have quietly changed this register on the sly. */
|
may have quietly changed this register on the sly. */
|
static void
|
static void
|
c4x_file_start (void)
|
c4x_file_start (void)
|
{
|
{
|
default_file_start ();
|
default_file_start ();
|
fprintf (asm_out_file, "\t.version\t%d\n", c4x_cpu_version);
|
fprintf (asm_out_file, "\t.version\t%d\n", c4x_cpu_version);
|
fputs ("\n\t.data\ndata_sec:\n", asm_out_file);
|
fputs ("\n\t.data\ndata_sec:\n", asm_out_file);
|
}
|
}
|
|
|
|
|
static void
|
static void
|
c4x_file_end (void)
|
c4x_file_end (void)
|
{
|
{
|
struct name_list *p;
|
struct name_list *p;
|
|
|
/* Output all external names that are not global. */
|
/* Output all external names that are not global. */
|
p = extern_head;
|
p = extern_head;
|
while (p)
|
while (p)
|
{
|
{
|
fprintf (asm_out_file, "\t.ref\t");
|
fprintf (asm_out_file, "\t.ref\t");
|
assemble_name (asm_out_file, p->name);
|
assemble_name (asm_out_file, p->name);
|
fprintf (asm_out_file, "\n");
|
fprintf (asm_out_file, "\n");
|
p = p->next;
|
p = p->next;
|
}
|
}
|
fprintf (asm_out_file, "\t.end\n");
|
fprintf (asm_out_file, "\t.end\n");
|
}
|
}
|
|
|
|
|
/* Search LIST (a chain of (name . value) pairs) for an entry whose
   purpose matches DECL's name; if found, prepend an ATTRIB attribute
   carrying that entry's value onto *ATTRIBUTES.  */

static void
c4x_check_attribute (const char *attrib, tree list, tree decl, tree *attributes)
{
  /* NOTE(review): this compares the IDENTIFIER_POINTER strings by
     address rather than content, which relies on identifier string
     interning -- confirm that both sides come from the identifier
     hash table.  */
  while (list != NULL_TREE
         && IDENTIFIER_POINTER (TREE_PURPOSE (list))
         != IDENTIFIER_POINTER (DECL_NAME (decl)))
    list = TREE_CHAIN (list);
  if (list)
    *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
                             *attributes);
}
|
|
|
|
|
static void
|
static void
|
c4x_insert_attributes (tree decl, tree *attributes)
|
c4x_insert_attributes (tree decl, tree *attributes)
|
{
|
{
|
switch (TREE_CODE (decl))
|
switch (TREE_CODE (decl))
|
{
|
{
|
case FUNCTION_DECL:
|
case FUNCTION_DECL:
|
c4x_check_attribute ("section", code_tree, decl, attributes);
|
c4x_check_attribute ("section", code_tree, decl, attributes);
|
c4x_check_attribute ("const", pure_tree, decl, attributes);
|
c4x_check_attribute ("const", pure_tree, decl, attributes);
|
c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
|
c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
|
c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
|
c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
|
c4x_check_attribute ("naked", naked_tree, decl, attributes);
|
c4x_check_attribute ("naked", naked_tree, decl, attributes);
|
break;
|
break;
|
|
|
case VAR_DECL:
|
case VAR_DECL:
|
c4x_check_attribute ("section", data_tree, decl, attributes);
|
c4x_check_attribute ("section", data_tree, decl, attributes);
|
break;
|
break;
|
|
|
default:
|
default:
|
break;
|
break;
|
}
|
}
|
}
|
}
|
|
|
/* Table of valid machine attributes. */
|
/* Table of valid machine attributes. */
|
const struct attribute_spec c4x_attribute_table[] =
|
const struct attribute_spec c4x_attribute_table[] =
|
{
|
{
|
/* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
|
/* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
|
{ "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
|
{ "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
|
{ "naked", 0, 0, false, true, true, c4x_handle_fntype_attribute },
|
{ "naked", 0, 0, false, true, true, c4x_handle_fntype_attribute },
|
{ "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
|
{ "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
|
{ NULL, 0, 0, false, false, false, NULL }
|
{ NULL, 0, 0, false, false, false, NULL }
|
};
|
};
|
|
|
/* Handle an attribute requiring a FUNCTION_TYPE;
|
/* Handle an attribute requiring a FUNCTION_TYPE;
|
arguments as in struct attribute_spec.handler. */
|
arguments as in struct attribute_spec.handler. */
|
static tree
|
static tree
|
c4x_handle_fntype_attribute (tree *node, tree name,
|
c4x_handle_fntype_attribute (tree *node, tree name,
|
tree args ATTRIBUTE_UNUSED,
|
tree args ATTRIBUTE_UNUSED,
|
int flags ATTRIBUTE_UNUSED,
|
int flags ATTRIBUTE_UNUSED,
|
bool *no_add_attrs)
|
bool *no_add_attrs)
|
{
|
{
|
if (TREE_CODE (*node) != FUNCTION_TYPE)
|
if (TREE_CODE (*node) != FUNCTION_TYPE)
|
{
|
{
|
warning (OPT_Wattributes, "%qs attribute only applies to functions",
|
warning (OPT_Wattributes, "%qs attribute only applies to functions",
|
IDENTIFIER_POINTER (name));
|
IDENTIFIER_POINTER (name));
|
*no_add_attrs = true;
|
*no_add_attrs = true;
|
}
|
}
|
|
|
return NULL_TREE;
|
return NULL_TREE;
|
}
|
}
|
|
|
|
|
/* !!! FIXME to emit RPTS correctly. */
|
/* !!! FIXME to emit RPTS correctly. */
|
|
|
int
|
int
|
c4x_rptb_rpts_p (rtx insn, rtx op)
|
c4x_rptb_rpts_p (rtx insn, rtx op)
|
{
|
{
|
/* The next insn should be our label marking where the
|
/* The next insn should be our label marking where the
|
repeat block starts. */
|
repeat block starts. */
|
insn = NEXT_INSN (insn);
|
insn = NEXT_INSN (insn);
|
if (GET_CODE (insn) != CODE_LABEL)
|
if (GET_CODE (insn) != CODE_LABEL)
|
{
|
{
|
/* Some insns may have been shifted between the RPTB insn
|
/* Some insns may have been shifted between the RPTB insn
|
and the top label... They were probably destined to
|
and the top label... They were probably destined to
|
be moved out of the loop. For now, let's leave them
|
be moved out of the loop. For now, let's leave them
|
where they are and print a warning. We should
|
where they are and print a warning. We should
|
probably move these insns before the repeat block insn. */
|
probably move these insns before the repeat block insn. */
|
if (TARGET_DEBUG)
|
if (TARGET_DEBUG)
|
fatal_insn ("c4x_rptb_rpts_p: Repeat block top label moved",
|
fatal_insn ("c4x_rptb_rpts_p: Repeat block top label moved",
|
insn);
|
insn);
|
return 0;
|
return 0;
|
}
|
}
|
|
|
/* Skip any notes. */
|
/* Skip any notes. */
|
insn = next_nonnote_insn (insn);
|
insn = next_nonnote_insn (insn);
|
|
|
/* This should be our first insn in the loop. */
|
/* This should be our first insn in the loop. */
|
if (! INSN_P (insn))
|
if (! INSN_P (insn))
|
return 0;
|
return 0;
|
|
|
/* Skip any notes. */
|
/* Skip any notes. */
|
insn = next_nonnote_insn (insn);
|
insn = next_nonnote_insn (insn);
|
|
|
if (! INSN_P (insn))
|
if (! INSN_P (insn))
|
return 0;
|
return 0;
|
|
|
if (recog_memoized (insn) != CODE_FOR_rptb_end)
|
if (recog_memoized (insn) != CODE_FOR_rptb_end)
|
return 0;
|
return 0;
|
|
|
if (TARGET_RPTS)
|
if (TARGET_RPTS)
|
return 1;
|
return 1;
|
|
|
return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
|
return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
|
}
|
}
|
|
|
|
|
/* Check if register r11 is used as the destination of an insn. */
|
/* Check if register r11 is used as the destination of an insn. */
|
|
|
static int
|
static int
|
c4x_r11_set_p(rtx x)
|
c4x_r11_set_p(rtx x)
|
{
|
{
|
rtx set;
|
rtx set;
|
int i, j;
|
int i, j;
|
const char *fmt;
|
const char *fmt;
|
|
|
if (x == 0)
|
if (x == 0)
|
return 0;
|
return 0;
|
|
|
if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
|
if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
|
x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
|
x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
|
|
|
if (INSN_P (x) && (set = single_set (x)))
|
if (INSN_P (x) && (set = single_set (x)))
|
x = SET_DEST (set);
|
x = SET_DEST (set);
|
|
|
if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
|
if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
|
return 1;
|
return 1;
|
|
|
fmt = GET_RTX_FORMAT (GET_CODE (x));
|
fmt = GET_RTX_FORMAT (GET_CODE (x));
|
for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
|
for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
|
{
|
{
|
if (fmt[i] == 'e')
|
if (fmt[i] == 'e')
|
{
|
{
|
if (c4x_r11_set_p (XEXP (x, i)))
|
if (c4x_r11_set_p (XEXP (x, i)))
|
return 1;
|
return 1;
|
}
|
}
|
else if (fmt[i] == 'E')
|
else if (fmt[i] == 'E')
|
for (j = XVECLEN (x, i) - 1; j >= 0; j--)
|
for (j = XVECLEN (x, i) - 1; j >= 0; j--)
|
if (c4x_r11_set_p (XVECEXP (x, i, j)))
|
if (c4x_r11_set_p (XVECEXP (x, i, j)))
|
return 1;
|
return 1;
|
}
|
}
|
return 0;
|
return 0;
|
}
|
}
|
|
|
|
|
/* The c4x sometimes has a problem when the insn before the laj insn
|
/* The c4x sometimes has a problem when the insn before the laj insn
|
sets the r11 register. Check for this situation. */
|
sets the r11 register. Check for this situation. */
|
|
|
int
|
int
|
c4x_check_laj_p (rtx insn)
|
c4x_check_laj_p (rtx insn)
|
{
|
{
|
insn = prev_nonnote_insn (insn);
|
insn = prev_nonnote_insn (insn);
|
|
|
/* If this is the start of the function no nop is needed. */
|
/* If this is the start of the function no nop is needed. */
|
if (insn == 0)
|
if (insn == 0)
|
return 0;
|
return 0;
|
|
|
/* If the previous insn is a code label we have to insert a nop. This
|
/* If the previous insn is a code label we have to insert a nop. This
|
could be a jump or table jump. We can find the normal jumps by
|
could be a jump or table jump. We can find the normal jumps by
|
scanning the function but this will not find table jumps. */
|
scanning the function but this will not find table jumps. */
|
if (GET_CODE (insn) == CODE_LABEL)
|
if (GET_CODE (insn) == CODE_LABEL)
|
return 1;
|
return 1;
|
|
|
/* If the previous insn sets register r11 we have to insert a nop. */
|
/* If the previous insn sets register r11 we have to insert a nop. */
|
if (c4x_r11_set_p (insn))
|
if (c4x_r11_set_p (insn))
|
return 1;
|
return 1;
|
|
|
/* No nop needed. */
|
/* No nop needed. */
|
return 0;
|
return 0;
|
}
|
}
|
|
|
|
|
/* Adjust the cost of a scheduling dependency. Return the new cost of
|
/* Adjust the cost of a scheduling dependency. Return the new cost of
|
a dependency LINK or INSN on DEP_INSN. COST is the current cost.
|
a dependency LINK or INSN on DEP_INSN. COST is the current cost.
|
A set of an address register followed by a use occurs a 2 cycle
|
A set of an address register followed by a use occurs a 2 cycle
|
stall (reduced to a single cycle on the c40 using LDA), while
|
stall (reduced to a single cycle on the c40 using LDA), while
|
a read of an address register followed by a use occurs a single cycle. */
|
a read of an address register followed by a use occurs a single cycle. */
|
|
|
#define SET_USE_COST 3
|
#define SET_USE_COST 3
|
#define SETLDA_USE_COST 2
|
#define SETLDA_USE_COST 2
|
#define READ_USE_COST 2
|
#define READ_USE_COST 2
|
|
|
static int
|
static int
|
c4x_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
|
c4x_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
|
{
|
{
|
/* Don't worry about this until we know what registers have been
|
/* Don't worry about this until we know what registers have been
|
assigned. */
|
assigned. */
|
if (flag_schedule_insns == 0 && ! reload_completed)
|
if (flag_schedule_insns == 0 && ! reload_completed)
|
return 0;
|
return 0;
|
|
|
/* How do we handle dependencies where a read followed by another
|
/* How do we handle dependencies where a read followed by another
|
read causes a pipeline stall? For example, a read of ar0 followed
|
read causes a pipeline stall? For example, a read of ar0 followed
|
by the use of ar0 for a memory reference. It looks like we
|
by the use of ar0 for a memory reference. It looks like we
|
need to extend the scheduler to handle this case. */
|
need to extend the scheduler to handle this case. */
|
|
|
/* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
|
/* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
|
(clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
|
(clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
|
so only deal with insns we know about. */
|
so only deal with insns we know about. */
|
if (recog_memoized (dep_insn) < 0)
|
if (recog_memoized (dep_insn) < 0)
|
return 0;
|
return 0;
|
|
|
if (REG_NOTE_KIND (link) == 0)
|
if (REG_NOTE_KIND (link) == 0)
|
{
|
{
|
int max = 0;
|
int max = 0;
|
|
|
/* Data dependency; DEP_INSN writes a register that INSN reads some
|
/* Data dependency; DEP_INSN writes a register that INSN reads some
|
cycles later. */
|
cycles later. */
|
if (TARGET_C3X)
|
if (TARGET_C3X)
|
{
|
{
|
if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
|
if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
|
if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
|
max = READ_USE_COST > max ? READ_USE_COST : max;
|
max = READ_USE_COST > max ? READ_USE_COST : max;
|
}
|
}
|
else
|
else
|
{
|
{
|
/* This could be significantly optimized. We should look
|
/* This could be significantly optimized. We should look
|
to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
|
to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
|
insn uses ar0-ar7. We then test if the same register
|
insn uses ar0-ar7. We then test if the same register
|
is used. The tricky bit is that some operands will
|
is used. The tricky bit is that some operands will
|
use several registers... */
|
use several registers... */
|
if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
|
if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
|
if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
|
if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
|
max = READ_USE_COST > max ? READ_USE_COST : max;
|
max = READ_USE_COST > max ? READ_USE_COST : max;
|
|
|
if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
|
if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
|
if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
|
if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
|
max = READ_USE_COST > max ? READ_USE_COST : max;
|
max = READ_USE_COST > max ? READ_USE_COST : max;
|
|
|
if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
|
if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
|
if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
|
if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
|
max = READ_USE_COST > max ? READ_USE_COST : max;
|
max = READ_USE_COST > max ? READ_USE_COST : max;
|
|
|
if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
|
if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
|
if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
|
if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
|
max = READ_USE_COST > max ? READ_USE_COST : max;
|
max = READ_USE_COST > max ? READ_USE_COST : max;
|
|
|
if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
|
if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
|
if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
|
if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
|
max = READ_USE_COST > max ? READ_USE_COST : max;
|
max = READ_USE_COST > max ? READ_USE_COST : max;
|
|
|
if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
|
if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
|
if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
|
if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
|
max = READ_USE_COST > max ? READ_USE_COST : max;
|
max = READ_USE_COST > max ? READ_USE_COST : max;
|
|
|
if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
|
if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
|
if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
|
if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
|
max = READ_USE_COST > max ? READ_USE_COST : max;
|
max = READ_USE_COST > max ? READ_USE_COST : max;
|
|
|
if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
|
if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
|
if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
|
if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
|
max = READ_USE_COST > max ? READ_USE_COST : max;
|
max = READ_USE_COST > max ? READ_USE_COST : max;
|
|
|
if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
|
if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
|
if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
|
|
if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
|
if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
max = SET_USE_COST > max ? SET_USE_COST : max;
|
if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
|
if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
|
}
|
}
|
|
|
if (max)
|
if (max)
|
cost = max;
|
cost = max;
|
|
|
/* For other data dependencies, the default cost specified in the
|
/* For other data dependencies, the default cost specified in the
|
md is correct. */
|
md is correct. */
|
return cost;
|
return cost;
|
}
|
}
|
else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
|
else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
|
{
|
{
|
/* Anti dependency; DEP_INSN reads a register that INSN writes some
|
/* Anti dependency; DEP_INSN reads a register that INSN writes some
|
cycles later. */
|
cycles later. */
|
|
|
/* For c4x anti dependencies, the cost is 0. */
|
/* For c4x anti dependencies, the cost is 0. */
|
return 0;
|
return 0;
|
}
|
}
|
else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
|
else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
|
{
|
{
|
/* Output dependency; DEP_INSN writes a register that INSN writes some
|
/* Output dependency; DEP_INSN writes a register that INSN writes some
|
cycles later. */
|
cycles later. */
|
|
|
/* For c4x output dependencies, the cost is 0. */
|
/* For c4x output dependencies, the cost is 0. */
|
return 0;
|
return 0;
|
}
|
}
|
else
|
else
|
abort ();
|
abort ();
|
}
|
}
|
|
|
void
|
void
|
c4x_init_builtins (void)
|
c4x_init_builtins (void)
|
{
|
{
|
tree endlink = void_list_node;
|
tree endlink = void_list_node;
|
|
|
lang_hooks.builtin_function ("fast_ftoi",
|
lang_hooks.builtin_function ("fast_ftoi",
|
build_function_type
|
build_function_type
|
(integer_type_node,
|
(integer_type_node,
|
tree_cons (NULL_TREE, double_type_node,
|
tree_cons (NULL_TREE, double_type_node,
|
endlink)),
|
endlink)),
|
C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
|
C4X_BUILTIN_FIX, BUILT_IN_MD, NULL, NULL_TREE);
|
lang_hooks.builtin_function ("ansi_ftoi",
|
lang_hooks.builtin_function ("ansi_ftoi",
|
build_function_type
|
build_function_type
|
(integer_type_node,
|
(integer_type_node,
|
tree_cons (NULL_TREE, double_type_node,
|
tree_cons (NULL_TREE, double_type_node,
|
endlink)),
|
endlink)),
|
C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL,
|
C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL,
|
NULL_TREE);
|
NULL_TREE);
|
if (TARGET_C3X)
|
if (TARGET_C3X)
|
lang_hooks.builtin_function ("fast_imult",
|
lang_hooks.builtin_function ("fast_imult",
|
build_function_type
|
build_function_type
|
(integer_type_node,
|
(integer_type_node,
|
tree_cons (NULL_TREE, integer_type_node,
|
tree_cons (NULL_TREE, integer_type_node,
|
tree_cons (NULL_TREE,
|
tree_cons (NULL_TREE,
|
integer_type_node,
|
integer_type_node,
|
endlink))),
|
endlink))),
|
C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL,
|
C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL,
|
NULL_TREE);
|
NULL_TREE);
|
else
|
else
|
{
|
{
|
lang_hooks.builtin_function ("toieee",
|
lang_hooks.builtin_function ("toieee",
|
build_function_type
|
build_function_type
|
(double_type_node,
|
(double_type_node,
|
tree_cons (NULL_TREE, double_type_node,
|
tree_cons (NULL_TREE, double_type_node,
|
endlink)),
|
endlink)),
|
C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL,
|
C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL,
|
NULL_TREE);
|
NULL_TREE);
|
lang_hooks.builtin_function ("frieee",
|
lang_hooks.builtin_function ("frieee",
|
build_function_type
|
build_function_type
|
(double_type_node,
|
(double_type_node,
|
tree_cons (NULL_TREE, double_type_node,
|
tree_cons (NULL_TREE, double_type_node,
|
endlink)),
|
endlink)),
|
C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL,
|
C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL,
|
NULL_TREE);
|
NULL_TREE);
|
lang_hooks.builtin_function ("fast_invf",
|
lang_hooks.builtin_function ("fast_invf",
|
build_function_type
|
build_function_type
|
(double_type_node,
|
(double_type_node,
|
tree_cons (NULL_TREE, double_type_node,
|
tree_cons (NULL_TREE, double_type_node,
|
endlink)),
|
endlink)),
|
C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL,
|
C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL,
|
NULL_TREE);
|
NULL_TREE);
|
}
|
}
|
}
|
}
|
|
|
|
|
rtx
|
rtx
|
c4x_expand_builtin (tree exp, rtx target,
|
c4x_expand_builtin (tree exp, rtx target,
|
rtx subtarget ATTRIBUTE_UNUSED,
|
rtx subtarget ATTRIBUTE_UNUSED,
|
enum machine_mode mode ATTRIBUTE_UNUSED,
|
enum machine_mode mode ATTRIBUTE_UNUSED,
|
int ignore ATTRIBUTE_UNUSED)
|
int ignore ATTRIBUTE_UNUSED)
|
{
|
{
|
tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
|
tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
|
unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
|
unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
|
tree arglist = TREE_OPERAND (exp, 1);
|
tree arglist = TREE_OPERAND (exp, 1);
|
tree arg0, arg1;
|
tree arg0, arg1;
|
rtx r0, r1;
|
rtx r0, r1;
|
|
|
switch (fcode)
|
switch (fcode)
|
{
|
{
|
case C4X_BUILTIN_FIX:
|
case C4X_BUILTIN_FIX:
|
arg0 = TREE_VALUE (arglist);
|
arg0 = TREE_VALUE (arglist);
|
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
|
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
|
if (! target || ! register_operand (target, QImode))
|
if (! target || ! register_operand (target, QImode))
|
target = gen_reg_rtx (QImode);
|
target = gen_reg_rtx (QImode);
|
emit_insn (gen_fixqfqi_clobber (target, r0));
|
emit_insn (gen_fixqfqi_clobber (target, r0));
|
return target;
|
return target;
|
|
|
case C4X_BUILTIN_FIX_ANSI:
|
case C4X_BUILTIN_FIX_ANSI:
|
arg0 = TREE_VALUE (arglist);
|
arg0 = TREE_VALUE (arglist);
|
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
|
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
|
if (! target || ! register_operand (target, QImode))
|
if (! target || ! register_operand (target, QImode))
|
target = gen_reg_rtx (QImode);
|
target = gen_reg_rtx (QImode);
|
emit_insn (gen_fix_truncqfqi2 (target, r0));
|
emit_insn (gen_fix_truncqfqi2 (target, r0));
|
return target;
|
return target;
|
|
|
case C4X_BUILTIN_MPYI:
|
case C4X_BUILTIN_MPYI:
|
if (! TARGET_C3X)
|
if (! TARGET_C3X)
|
break;
|
break;
|
arg0 = TREE_VALUE (arglist);
|
arg0 = TREE_VALUE (arglist);
|
arg1 = TREE_VALUE (TREE_CHAIN (arglist));
|
arg1 = TREE_VALUE (TREE_CHAIN (arglist));
|
r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
|
r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
|
r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
|
r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
|
if (! target || ! register_operand (target, QImode))
|
if (! target || ! register_operand (target, QImode))
|
target = gen_reg_rtx (QImode);
|
target = gen_reg_rtx (QImode);
|
emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
|
emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
|
return target;
|
return target;
|
|
|
case C4X_BUILTIN_TOIEEE:
|
case C4X_BUILTIN_TOIEEE:
|
if (TARGET_C3X)
|
if (TARGET_C3X)
|
break;
|
break;
|
arg0 = TREE_VALUE (arglist);
|
arg0 = TREE_VALUE (arglist);
|
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
|
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
|
if (! target || ! register_operand (target, QFmode))
|
if (! target || ! register_operand (target, QFmode))
|
target = gen_reg_rtx (QFmode);
|
target = gen_reg_rtx (QFmode);
|
emit_insn (gen_toieee (target, r0));
|
emit_insn (gen_toieee (target, r0));
|
return target;
|
return target;
|
|
|
case C4X_BUILTIN_FRIEEE:
|
case C4X_BUILTIN_FRIEEE:
|
if (TARGET_C3X)
|
if (TARGET_C3X)
|
break;
|
break;
|
arg0 = TREE_VALUE (arglist);
|
arg0 = TREE_VALUE (arglist);
|
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
|
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
|
if (register_operand (r0, QFmode))
|
if (register_operand (r0, QFmode))
|
{
|
{
|
r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
|
r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
|
emit_move_insn (r1, r0);
|
emit_move_insn (r1, r0);
|
r0 = r1;
|
r0 = r1;
|
}
|
}
|
if (! target || ! register_operand (target, QFmode))
|
if (! target || ! register_operand (target, QFmode))
|
target = gen_reg_rtx (QFmode);
|
target = gen_reg_rtx (QFmode);
|
emit_insn (gen_frieee (target, r0));
|
emit_insn (gen_frieee (target, r0));
|
return target;
|
return target;
|
|
|
case C4X_BUILTIN_RCPF:
|
case C4X_BUILTIN_RCPF:
|
if (TARGET_C3X)
|
if (TARGET_C3X)
|
break;
|
break;
|
arg0 = TREE_VALUE (arglist);
|
arg0 = TREE_VALUE (arglist);
|
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
|
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
|
if (! target || ! register_operand (target, QFmode))
|
if (! target || ! register_operand (target, QFmode))
|
target = gen_reg_rtx (QFmode);
|
target = gen_reg_rtx (QFmode);
|
emit_insn (gen_rcpfqf_clobber (target, r0));
|
emit_insn (gen_rcpfqf_clobber (target, r0));
|
return target;
|
return target;
|
}
|
}
|
return NULL_RTX;
|
return NULL_RTX;
|
}
|
}
|
|
|
static void
|
static void
|
c4x_init_libfuncs (void)
|
c4x_init_libfuncs (void)
|
{
|
{
|
set_optab_libfunc (smul_optab, QImode, "__mulqi3");
|
set_optab_libfunc (smul_optab, QImode, "__mulqi3");
|
set_optab_libfunc (sdiv_optab, QImode, "__divqi3");
|
set_optab_libfunc (sdiv_optab, QImode, "__divqi3");
|
set_optab_libfunc (udiv_optab, QImode, "__udivqi3");
|
set_optab_libfunc (udiv_optab, QImode, "__udivqi3");
|
set_optab_libfunc (smod_optab, QImode, "__modqi3");
|
set_optab_libfunc (smod_optab, QImode, "__modqi3");
|
set_optab_libfunc (umod_optab, QImode, "__umodqi3");
|
set_optab_libfunc (umod_optab, QImode, "__umodqi3");
|
set_optab_libfunc (sdiv_optab, QFmode, "__divqf3");
|
set_optab_libfunc (sdiv_optab, QFmode, "__divqf3");
|
set_optab_libfunc (smul_optab, HFmode, "__mulhf3");
|
set_optab_libfunc (smul_optab, HFmode, "__mulhf3");
|
set_optab_libfunc (sdiv_optab, HFmode, "__divhf3");
|
set_optab_libfunc (sdiv_optab, HFmode, "__divhf3");
|
set_optab_libfunc (smul_optab, HImode, "__mulhi3");
|
set_optab_libfunc (smul_optab, HImode, "__mulhi3");
|
set_optab_libfunc (sdiv_optab, HImode, "__divhi3");
|
set_optab_libfunc (sdiv_optab, HImode, "__divhi3");
|
set_optab_libfunc (udiv_optab, HImode, "__udivhi3");
|
set_optab_libfunc (udiv_optab, HImode, "__udivhi3");
|
set_optab_libfunc (smod_optab, HImode, "__modhi3");
|
set_optab_libfunc (smod_optab, HImode, "__modhi3");
|
set_optab_libfunc (umod_optab, HImode, "__umodhi3");
|
set_optab_libfunc (umod_optab, HImode, "__umodhi3");
|
set_optab_libfunc (ffs_optab, QImode, "__ffs");
|
set_optab_libfunc (ffs_optab, QImode, "__ffs");
|
smulhi3_libfunc = init_one_libfunc ("__smulhi3_high");
|
smulhi3_libfunc = init_one_libfunc ("__smulhi3_high");
|
umulhi3_libfunc = init_one_libfunc ("__umulhi3_high");
|
umulhi3_libfunc = init_one_libfunc ("__umulhi3_high");
|
fix_truncqfhi2_libfunc = init_one_libfunc ("__fix_truncqfhi2");
|
fix_truncqfhi2_libfunc = init_one_libfunc ("__fix_truncqfhi2");
|
fixuns_truncqfhi2_libfunc = init_one_libfunc ("__ufix_truncqfhi2");
|
fixuns_truncqfhi2_libfunc = init_one_libfunc ("__ufix_truncqfhi2");
|
fix_trunchfhi2_libfunc = init_one_libfunc ("__fix_trunchfhi2");
|
fix_trunchfhi2_libfunc = init_one_libfunc ("__fix_trunchfhi2");
|
fixuns_trunchfhi2_libfunc = init_one_libfunc ("__ufix_trunchfhi2");
|
fixuns_trunchfhi2_libfunc = init_one_libfunc ("__ufix_trunchfhi2");
|
floathiqf2_libfunc = init_one_libfunc ("__floathiqf2");
|
floathiqf2_libfunc = init_one_libfunc ("__floathiqf2");
|
floatunshiqf2_libfunc = init_one_libfunc ("__ufloathiqf2");
|
floatunshiqf2_libfunc = init_one_libfunc ("__ufloathiqf2");
|
floathihf2_libfunc = init_one_libfunc ("__floathihf2");
|
floathihf2_libfunc = init_one_libfunc ("__floathihf2");
|
floatunshihf2_libfunc = init_one_libfunc ("__ufloathihf2");
|
floatunshihf2_libfunc = init_one_libfunc ("__ufloathihf2");
|
}
|
}
|
|
|
static void
|
static void
|
c4x_asm_named_section (const char *name, unsigned int flags ATTRIBUTE_UNUSED,
|
c4x_asm_named_section (const char *name, unsigned int flags ATTRIBUTE_UNUSED,
|
tree decl ATTRIBUTE_UNUSED)
|
tree decl ATTRIBUTE_UNUSED)
|
{
|
{
|
fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
|
fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
|
}
|
}
|
|
|
static void
|
static void
|
c4x_globalize_label (FILE *stream, const char *name)
|
c4x_globalize_label (FILE *stream, const char *name)
|
{
|
{
|
default_globalize_label (stream, name);
|
default_globalize_label (stream, name);
|
c4x_global_label (name);
|
c4x_global_label (name);
|
}
|
}
|
|
|
#define SHIFT_CODE_P(C) \
|
#define SHIFT_CODE_P(C) \
|
((C) == ASHIFT || (C) == ASHIFTRT || (C) == LSHIFTRT)
|
((C) == ASHIFT || (C) == ASHIFTRT || (C) == LSHIFTRT)
|
#define LOGICAL_CODE_P(C) \
|
#define LOGICAL_CODE_P(C) \
|
((C) == NOT || (C) == AND || (C) == IOR || (C) == XOR)
|
((C) == NOT || (C) == AND || (C) == IOR || (C) == XOR)
|
|
|
/* Compute a (partial) cost for rtx X. Return true if the complete
|
/* Compute a (partial) cost for rtx X. Return true if the complete
|
cost has been computed, and false if subexpressions should be
|
cost has been computed, and false if subexpressions should be
|
scanned. In either case, *TOTAL contains the cost result. */
|
scanned. In either case, *TOTAL contains the cost result. */
|
|
|
static bool
|
static bool
|
c4x_rtx_costs (rtx x, int code, int outer_code, int *total)
|
c4x_rtx_costs (rtx x, int code, int outer_code, int *total)
|
{
|
{
|
HOST_WIDE_INT val;
|
HOST_WIDE_INT val;
|
|
|
switch (code)
|
switch (code)
|
{
|
{
|
/* Some small integers are effectively free for the C40. We should
|
/* Some small integers are effectively free for the C40. We should
|
also consider if we are using the small memory model. With
|
also consider if we are using the small memory model. With
|
the big memory model we require an extra insn for a constant
|
the big memory model we require an extra insn for a constant
|
loaded from memory. */
|
loaded from memory. */
|
|
|
case CONST_INT:
|
case CONST_INT:
|
val = INTVAL (x);
|
val = INTVAL (x);
|
if (c4x_J_constant (x))
|
if (c4x_J_constant (x))
|
*total = 0;
|
*total = 0;
|
else if (! TARGET_C3X
|
else if (! TARGET_C3X
|
&& outer_code == AND
|
&& outer_code == AND
|
&& (val == 255 || val == 65535))
|
&& (val == 255 || val == 65535))
|
*total = 0;
|
*total = 0;
|
else if (! TARGET_C3X
|
else if (! TARGET_C3X
|
&& (outer_code == ASHIFTRT || outer_code == LSHIFTRT)
|
&& (outer_code == ASHIFTRT || outer_code == LSHIFTRT)
|
&& (val == 16 || val == 24))
|
&& (val == 16 || val == 24))
|
*total = 0;
|
*total = 0;
|
else if (TARGET_C3X && SHIFT_CODE_P (outer_code))
|
else if (TARGET_C3X && SHIFT_CODE_P (outer_code))
|
*total = 3;
|
*total = 3;
|
else if (LOGICAL_CODE_P (outer_code)
|
else if (LOGICAL_CODE_P (outer_code)
|
? c4x_L_constant (x) : c4x_I_constant (x))
|
? c4x_L_constant (x) : c4x_I_constant (x))
|
*total = 2;
|
*total = 2;
|
else
|
else
|
*total = 4;
|
*total = 4;
|
return true;
|
return true;
|
|
|
case CONST:
|
case CONST:
|
case LABEL_REF:
|
case LABEL_REF:
|
case SYMBOL_REF:
|
case SYMBOL_REF:
|
*total = 4;
|
*total = 4;
|
return true;
|
return true;
|
|
|
case CONST_DOUBLE:
|
case CONST_DOUBLE:
|
if (c4x_H_constant (x))
|
if (c4x_H_constant (x))
|
*total = 2;
|
*total = 2;
|
else if (GET_MODE (x) == QFmode)
|
else if (GET_MODE (x) == QFmode)
|
*total = 4;
|
*total = 4;
|
else
|
else
|
*total = 8;
|
*total = 8;
|
return true;
|
return true;
|
|
|
/* ??? Note that we return true, rather than false so that rtx_cost
|
/* ??? Note that we return true, rather than false so that rtx_cost
|
doesn't include the constant costs. Otherwise expand_mult will
|
doesn't include the constant costs. Otherwise expand_mult will
|
think that it is cheaper to synthesize a multiply rather than to
|
think that it is cheaper to synthesize a multiply rather than to
|
use a multiply instruction. I think this is because the algorithm
|
use a multiply instruction. I think this is because the algorithm
|
synth_mult doesn't take into account the loading of the operands,
|
synth_mult doesn't take into account the loading of the operands,
|
whereas the calculation of mult_cost does. */
|
whereas the calculation of mult_cost does. */
|
case PLUS:
|
case PLUS:
|
case MINUS:
|
case MINUS:
|
case AND:
|
case AND:
|
case IOR:
|
case IOR:
|
case XOR:
|
case XOR:
|
case ASHIFT:
|
case ASHIFT:
|
case ASHIFTRT:
|
case ASHIFTRT:
|
case LSHIFTRT:
|
case LSHIFTRT:
|
*total = COSTS_N_INSNS (1);
|
*total = COSTS_N_INSNS (1);
|
return true;
|
return true;
|
|
|
case MULT:
|
case MULT:
|
*total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
|
*total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
|
|| TARGET_MPYI ? 1 : 14);
|
|| TARGET_MPYI ? 1 : 14);
|
return true;
|
return true;
|
|
|
case DIV:
|
case DIV:
|
case UDIV:
|
case UDIV:
|
case MOD:
|
case MOD:
|
case UMOD:
|
case UMOD:
|
*total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
|
*total = COSTS_N_INSNS (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
|
? 15 : 50);
|
? 15 : 50);
|
return true;
|
return true;
|
|
|
default:
|
default:
|
return false;
|
return false;
|
}
|
}
|
}
|
}
|
|
|
/* Worker function for TARGET_ASM_EXTERNAL_LIBCALL. */
|
/* Worker function for TARGET_ASM_EXTERNAL_LIBCALL. */
|
|
|
static void
|
static void
|
c4x_external_libcall (rtx fun)
|
c4x_external_libcall (rtx fun)
|
{
|
{
|
/* This is only needed to keep asm30 happy for ___divqf3 etc. */
|
/* This is only needed to keep asm30 happy for ___divqf3 etc. */
|
c4x_external_ref (XSTR (fun, 0));
|
c4x_external_ref (XSTR (fun, 0));
|
}
|
}
|
|
|
/* Worker function for TARGET_STRUCT_VALUE_RTX. */
|
/* Worker function for TARGET_STRUCT_VALUE_RTX. */
|
|
|
static rtx
|
static rtx
|
c4x_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
|
c4x_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
|
int incoming ATTRIBUTE_UNUSED)
|
int incoming ATTRIBUTE_UNUSED)
|
{
|
{
|
return gen_rtx_REG (Pmode, AR0_REGNO);
|
return gen_rtx_REG (Pmode, AR0_REGNO);
|
}
|
}
|
|
|