/* Target definitions for the MorphoRISC1
   Copyright (C) 2005, 2007 Free Software Foundation, Inc.
   Contributed by Red Hat, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "recog.h"
#include "toplev.h"
#include "output.h"
#include "integrate.h"
#include "tree.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "flags.h"
#include "tm_p.h"
#include "ggc.h"
#include "insn-flags.h"
#include "obstack.h"
#include "except.h"
#include "target.h"
#include "target-def.h"
#include "basic-block.h"
/* Frame pointer register mask.  */
#define FP_MASK			(1 << (GPR_FP))

/* Link register mask.  */
#define LINK_MASK		(1 << (GPR_LINK))

/* Given a SIZE in bytes, advance to the next word.  */
#define ROUND_ADVANCE(SIZE) (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
/* A C structure for machine-specific, per-function data.
|
/* A C structure for machine-specific, per-function data.
|
This is added to the cfun structure. */
|
This is added to the cfun structure. */
|
struct machine_function GTY(())
|
struct machine_function GTY(())
|
{
|
{
|
/* Flags if __builtin_return_address (n) with n >= 1 was used. */
|
/* Flags if __builtin_return_address (n) with n >= 1 was used. */
|
int ra_needs_full_frame;
|
int ra_needs_full_frame;
|
struct rtx_def * eh_stack_adjust;
|
struct rtx_def * eh_stack_adjust;
|
int interrupt_handler;
|
int interrupt_handler;
|
int has_loops;
|
int has_loops;
|
};
|
};
|
|
|
/* Define the information needed to generate branch and scc insns.
|
/* Define the information needed to generate branch and scc insns.
|
This is stored from the compare operation. */
|
This is stored from the compare operation. */
|
struct rtx_def * mt_compare_op0;
|
struct rtx_def * mt_compare_op0;
|
struct rtx_def * mt_compare_op1;
|
struct rtx_def * mt_compare_op1;
|
|
|
/* Current frame information calculated by compute_frame_size. */
|
/* Current frame information calculated by compute_frame_size. */
|
struct mt_frame_info current_frame_info;
|
struct mt_frame_info current_frame_info;
|
|
|
/* Zero structure to initialize current_frame_info. */
|
/* Zero structure to initialize current_frame_info. */
|
struct mt_frame_info zero_frame_info;
|
struct mt_frame_info zero_frame_info;
|
|
|
/* mt doesn't have unsigned compares need a library call for this. */
|
/* mt doesn't have unsigned compares need a library call for this. */
|
struct rtx_def * mt_ucmpsi3_libcall;
|
struct rtx_def * mt_ucmpsi3_libcall;
|
|
|
static int mt_flag_delayed_branch;
|
static int mt_flag_delayed_branch;
|
|
|
|
|
static rtx
|
static rtx
|
mt_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
|
mt_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
|
int incoming ATTRIBUTE_UNUSED)
|
int incoming ATTRIBUTE_UNUSED)
|
{
|
{
|
return gen_rtx_REG (Pmode, RETVAL_REGNUM);
|
return gen_rtx_REG (Pmode, RETVAL_REGNUM);
|
}
|
}
|
|
|
/* Implement RETURN_ADDR_RTX. */
|
/* Implement RETURN_ADDR_RTX. */
|
rtx
|
rtx
|
mt_return_addr_rtx (int count)
|
mt_return_addr_rtx (int count)
|
{
|
{
|
if (count != 0)
|
if (count != 0)
|
return NULL_RTX;
|
return NULL_RTX;
|
|
|
return get_hard_reg_initial_val (Pmode, GPR_LINK);
|
return get_hard_reg_initial_val (Pmode, GPR_LINK);
|
}
|
}
|
|
|
/* The following variable value indicates the number of nops required
   between the current instruction and the next instruction to avoid
   any pipeline hazards.  MT_NOP_REASONS carries a human-readable
   explanation emitted as an assembly comment.  */
static int mt_nops_required = 0;
static const char * mt_nop_reasons = "";
/* Implement ASM_OUTPUT_OPCODE. */
|
/* Implement ASM_OUTPUT_OPCODE. */
|
const char *
|
const char *
|
mt_asm_output_opcode (FILE *f ATTRIBUTE_UNUSED, const char *ptr)
|
mt_asm_output_opcode (FILE *f ATTRIBUTE_UNUSED, const char *ptr)
|
{
|
{
|
if (mt_nops_required)
|
if (mt_nops_required)
|
fprintf (f, ";# need %d nops because of %s\n\t",
|
fprintf (f, ";# need %d nops because of %s\n\t",
|
mt_nops_required, mt_nop_reasons);
|
mt_nops_required, mt_nop_reasons);
|
|
|
while (mt_nops_required)
|
while (mt_nops_required)
|
{
|
{
|
fprintf (f, "nop\n\t");
|
fprintf (f, "nop\n\t");
|
-- mt_nops_required;
|
-- mt_nops_required;
|
}
|
}
|
|
|
return ptr;
|
return ptr;
|
}
|
}
|
|
|
/* Given an insn, return whether it's a memory operation or a branch
|
/* Given an insn, return whether it's a memory operation or a branch
|
operation, otherwise return TYPE_ARITH. */
|
operation, otherwise return TYPE_ARITH. */
|
static enum attr_type
|
static enum attr_type
|
mt_get_attr_type (rtx complete_insn)
|
mt_get_attr_type (rtx complete_insn)
|
{
|
{
|
rtx insn = PATTERN (complete_insn);
|
rtx insn = PATTERN (complete_insn);
|
|
|
if (JUMP_P (complete_insn))
|
if (JUMP_P (complete_insn))
|
return TYPE_BRANCH;
|
return TYPE_BRANCH;
|
if (CALL_P (complete_insn))
|
if (CALL_P (complete_insn))
|
return TYPE_BRANCH;
|
return TYPE_BRANCH;
|
|
|
if (GET_CODE (insn) != SET)
|
if (GET_CODE (insn) != SET)
|
return TYPE_ARITH;
|
return TYPE_ARITH;
|
|
|
if (SET_DEST (insn) == pc_rtx)
|
if (SET_DEST (insn) == pc_rtx)
|
return TYPE_BRANCH;
|
return TYPE_BRANCH;
|
|
|
if (GET_CODE (SET_DEST (insn)) == MEM)
|
if (GET_CODE (SET_DEST (insn)) == MEM)
|
return TYPE_STORE;
|
return TYPE_STORE;
|
|
|
if (GET_CODE (SET_SRC (insn)) == MEM)
|
if (GET_CODE (SET_SRC (insn)) == MEM)
|
return TYPE_LOAD;
|
return TYPE_LOAD;
|
|
|
return TYPE_ARITH;
|
return TYPE_ARITH;
|
}
|
}
|
|
|
/* A helper routine for insn_dependent_p called through note_stores. */
|
/* A helper routine for insn_dependent_p called through note_stores. */
|
|
|
static void
|
static void
|
insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
|
insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
|
{
|
{
|
rtx * pinsn = (rtx *) data;
|
rtx * pinsn = (rtx *) data;
|
|
|
if (*pinsn && reg_mentioned_p (x, *pinsn))
|
if (*pinsn && reg_mentioned_p (x, *pinsn))
|
*pinsn = NULL_RTX;
|
*pinsn = NULL_RTX;
|
}
|
}
|
|
|
/* Return true if anything in insn X is (anti,output,true)
|
/* Return true if anything in insn X is (anti,output,true)
|
dependent on anything in insn Y. */
|
dependent on anything in insn Y. */
|
|
|
static bool
|
static bool
|
insn_dependent_p (rtx x, rtx y)
|
insn_dependent_p (rtx x, rtx y)
|
{
|
{
|
rtx tmp;
|
rtx tmp;
|
|
|
if (! INSN_P (x) || ! INSN_P (y))
|
if (! INSN_P (x) || ! INSN_P (y))
|
return 0;
|
return 0;
|
|
|
tmp = PATTERN (y);
|
tmp = PATTERN (y);
|
note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
|
note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
|
if (tmp == NULL_RTX)
|
if (tmp == NULL_RTX)
|
return true;
|
return true;
|
|
|
tmp = PATTERN (x);
|
tmp = PATTERN (x);
|
note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
|
note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
|
return (tmp == NULL_RTX);
|
return (tmp == NULL_RTX);
|
}
|
}
|
|
|
|
|
/* Return true if anything in insn X is true dependent on anything in
|
/* Return true if anything in insn X is true dependent on anything in
|
insn Y. */
|
insn Y. */
|
static bool
|
static bool
|
insn_true_dependent_p (rtx x, rtx y)
|
insn_true_dependent_p (rtx x, rtx y)
|
{
|
{
|
rtx tmp;
|
rtx tmp;
|
|
|
if (! INSN_P (x) || ! INSN_P (y))
|
if (! INSN_P (x) || ! INSN_P (y))
|
return 0;
|
return 0;
|
|
|
tmp = PATTERN (y);
|
tmp = PATTERN (y);
|
note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
|
note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
|
return (tmp == NULL_RTX);
|
return (tmp == NULL_RTX);
|
}
|
}
|
|
|
/* The following determines the number of nops that need to be
|
/* The following determines the number of nops that need to be
|
inserted between the previous instructions and current instruction
|
inserted between the previous instructions and current instruction
|
to avoid pipeline hazards on the mt processor. Remember that
|
to avoid pipeline hazards on the mt processor. Remember that
|
the function is not called for asm insns. */
|
the function is not called for asm insns. */
|
|
|
void
|
void
|
mt_final_prescan_insn (rtx insn,
|
mt_final_prescan_insn (rtx insn,
|
rtx * opvec ATTRIBUTE_UNUSED,
|
rtx * opvec ATTRIBUTE_UNUSED,
|
int noperands ATTRIBUTE_UNUSED)
|
int noperands ATTRIBUTE_UNUSED)
|
{
|
{
|
rtx prev_i;
|
rtx prev_i;
|
enum attr_type prev_attr;
|
enum attr_type prev_attr;
|
|
|
mt_nops_required = 0;
|
mt_nops_required = 0;
|
mt_nop_reasons = "";
|
mt_nop_reasons = "";
|
|
|
/* ms2 constraints are dealt with in reorg. */
|
/* ms2 constraints are dealt with in reorg. */
|
if (TARGET_MS2)
|
if (TARGET_MS2)
|
return;
|
return;
|
|
|
/* Only worry about real instructions. */
|
/* Only worry about real instructions. */
|
if (! INSN_P (insn))
|
if (! INSN_P (insn))
|
return;
|
return;
|
|
|
/* Find the previous real instructions. */
|
/* Find the previous real instructions. */
|
for (prev_i = PREV_INSN (insn);
|
for (prev_i = PREV_INSN (insn);
|
prev_i != NULL
|
prev_i != NULL
|
&& (! INSN_P (prev_i)
|
&& (! INSN_P (prev_i)
|
|| GET_CODE (PATTERN (prev_i)) == USE
|
|| GET_CODE (PATTERN (prev_i)) == USE
|
|| GET_CODE (PATTERN (prev_i)) == CLOBBER);
|
|| GET_CODE (PATTERN (prev_i)) == CLOBBER);
|
prev_i = PREV_INSN (prev_i))
|
prev_i = PREV_INSN (prev_i))
|
{
|
{
|
/* If we meet a barrier, there is no flow through here. */
|
/* If we meet a barrier, there is no flow through here. */
|
if (BARRIER_P (prev_i))
|
if (BARRIER_P (prev_i))
|
return;
|
return;
|
}
|
}
|
|
|
/* If there isn't one then there is nothing that we need do. */
|
/* If there isn't one then there is nothing that we need do. */
|
if (prev_i == NULL || ! INSN_P (prev_i))
|
if (prev_i == NULL || ! INSN_P (prev_i))
|
return;
|
return;
|
|
|
prev_attr = mt_get_attr_type (prev_i);
|
prev_attr = mt_get_attr_type (prev_i);
|
|
|
/* Delayed branch slots already taken care of by delay branch scheduling. */
|
/* Delayed branch slots already taken care of by delay branch scheduling. */
|
if (prev_attr == TYPE_BRANCH)
|
if (prev_attr == TYPE_BRANCH)
|
return;
|
return;
|
|
|
switch (mt_get_attr_type (insn))
|
switch (mt_get_attr_type (insn))
|
{
|
{
|
case TYPE_LOAD:
|
case TYPE_LOAD:
|
case TYPE_STORE:
|
case TYPE_STORE:
|
/* Avoid consecutive memory operation. */
|
/* Avoid consecutive memory operation. */
|
if ((prev_attr == TYPE_LOAD || prev_attr == TYPE_STORE)
|
if ((prev_attr == TYPE_LOAD || prev_attr == TYPE_STORE)
|
&& TARGET_MS1_64_001)
|
&& TARGET_MS1_64_001)
|
{
|
{
|
mt_nops_required = 1;
|
mt_nops_required = 1;
|
mt_nop_reasons = "consecutive mem ops";
|
mt_nop_reasons = "consecutive mem ops";
|
}
|
}
|
/* Drop through. */
|
/* Drop through. */
|
|
|
case TYPE_ARITH:
|
case TYPE_ARITH:
|
case TYPE_COMPLEX:
|
case TYPE_COMPLEX:
|
/* One cycle of delay is required between load
|
/* One cycle of delay is required between load
|
and the dependent arithmetic instruction. */
|
and the dependent arithmetic instruction. */
|
if (prev_attr == TYPE_LOAD
|
if (prev_attr == TYPE_LOAD
|
&& insn_true_dependent_p (prev_i, insn))
|
&& insn_true_dependent_p (prev_i, insn))
|
{
|
{
|
mt_nops_required = 1;
|
mt_nops_required = 1;
|
mt_nop_reasons = "load->arith dependency delay";
|
mt_nop_reasons = "load->arith dependency delay";
|
}
|
}
|
break;
|
break;
|
|
|
case TYPE_BRANCH:
|
case TYPE_BRANCH:
|
if (insn_dependent_p (prev_i, insn))
|
if (insn_dependent_p (prev_i, insn))
|
{
|
{
|
if (prev_attr == TYPE_ARITH && TARGET_MS1_64_001)
|
if (prev_attr == TYPE_ARITH && TARGET_MS1_64_001)
|
{
|
{
|
/* One cycle of delay between arith
|
/* One cycle of delay between arith
|
instructions and branch dependent on arith. */
|
instructions and branch dependent on arith. */
|
mt_nops_required = 1;
|
mt_nops_required = 1;
|
mt_nop_reasons = "arith->branch dependency delay";
|
mt_nop_reasons = "arith->branch dependency delay";
|
}
|
}
|
else if (prev_attr == TYPE_LOAD)
|
else if (prev_attr == TYPE_LOAD)
|
{
|
{
|
/* Two cycles of delay are required
|
/* Two cycles of delay are required
|
between load and dependent branch. */
|
between load and dependent branch. */
|
if (TARGET_MS1_64_001)
|
if (TARGET_MS1_64_001)
|
mt_nops_required = 2;
|
mt_nops_required = 2;
|
else
|
else
|
mt_nops_required = 1;
|
mt_nops_required = 1;
|
mt_nop_reasons = "load->branch dependency delay";
|
mt_nop_reasons = "load->branch dependency delay";
|
}
|
}
|
}
|
}
|
break;
|
break;
|
|
|
default:
|
default:
|
fatal_insn ("mt_final_prescan_insn, invalid insn #1", insn);
|
fatal_insn ("mt_final_prescan_insn, invalid insn #1", insn);
|
break;
|
break;
|
}
|
}
|
}
|
}
|
|
|
/* Print debugging information for a frame. */
|
/* Print debugging information for a frame. */
|
static void
|
static void
|
mt_debug_stack (struct mt_frame_info * info)
|
mt_debug_stack (struct mt_frame_info * info)
|
{
|
{
|
int regno;
|
int regno;
|
|
|
if (!info)
|
if (!info)
|
{
|
{
|
error ("info pointer NULL");
|
error ("info pointer NULL");
|
gcc_unreachable ();
|
gcc_unreachable ();
|
}
|
}
|
|
|
fprintf (stderr, "\nStack information for function %s:\n",
|
fprintf (stderr, "\nStack information for function %s:\n",
|
((current_function_decl && DECL_NAME (current_function_decl))
|
((current_function_decl && DECL_NAME (current_function_decl))
|
? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
|
? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
|
: "<unknown>"));
|
: "<unknown>"));
|
|
|
fprintf (stderr, "\ttotal_size = %d\n", info->total_size);
|
fprintf (stderr, "\ttotal_size = %d\n", info->total_size);
|
fprintf (stderr, "\tpretend_size = %d\n", info->pretend_size);
|
fprintf (stderr, "\tpretend_size = %d\n", info->pretend_size);
|
fprintf (stderr, "\targs_size = %d\n", info->args_size);
|
fprintf (stderr, "\targs_size = %d\n", info->args_size);
|
fprintf (stderr, "\textra_size = %d\n", info->extra_size);
|
fprintf (stderr, "\textra_size = %d\n", info->extra_size);
|
fprintf (stderr, "\treg_size = %d\n", info->reg_size);
|
fprintf (stderr, "\treg_size = %d\n", info->reg_size);
|
fprintf (stderr, "\tvar_size = %d\n", info->var_size);
|
fprintf (stderr, "\tvar_size = %d\n", info->var_size);
|
fprintf (stderr, "\tframe_size = %d\n", info->frame_size);
|
fprintf (stderr, "\tframe_size = %d\n", info->frame_size);
|
fprintf (stderr, "\treg_mask = 0x%x\n", info->reg_mask);
|
fprintf (stderr, "\treg_mask = 0x%x\n", info->reg_mask);
|
fprintf (stderr, "\tsave_fp = %d\n", info->save_fp);
|
fprintf (stderr, "\tsave_fp = %d\n", info->save_fp);
|
fprintf (stderr, "\tsave_lr = %d\n", info->save_lr);
|
fprintf (stderr, "\tsave_lr = %d\n", info->save_lr);
|
fprintf (stderr, "\tinitialized = %d\n", info->initialized);
|
fprintf (stderr, "\tinitialized = %d\n", info->initialized);
|
fprintf (stderr, "\tsaved registers =");
|
fprintf (stderr, "\tsaved registers =");
|
|
|
/* Print out reg_mask in a more readable format. */
|
/* Print out reg_mask in a more readable format. */
|
for (regno = GPR_R0; regno <= GPR_LAST; regno++)
|
for (regno = GPR_R0; regno <= GPR_LAST; regno++)
|
if ( (1 << regno) & info->reg_mask)
|
if ( (1 << regno) & info->reg_mask)
|
fprintf (stderr, " %s", reg_names[regno]);
|
fprintf (stderr, " %s", reg_names[regno]);
|
|
|
putc ('\n', stderr);
|
putc ('\n', stderr);
|
fflush (stderr);
|
fflush (stderr);
|
}
|
}
|
|
|
/* Print a memory address as an operand to reference that memory location. */
|
/* Print a memory address as an operand to reference that memory location. */
|
|
|
static void
|
static void
|
mt_print_operand_simple_address (FILE * file, rtx addr)
|
mt_print_operand_simple_address (FILE * file, rtx addr)
|
{
|
{
|
if (!addr)
|
if (!addr)
|
error ("PRINT_OPERAND_ADDRESS, null pointer");
|
error ("PRINT_OPERAND_ADDRESS, null pointer");
|
|
|
else
|
else
|
switch (GET_CODE (addr))
|
switch (GET_CODE (addr))
|
{
|
{
|
case REG:
|
case REG:
|
fprintf (file, "%s, #0", reg_names [REGNO (addr)]);
|
fprintf (file, "%s, #0", reg_names [REGNO (addr)]);
|
break;
|
break;
|
|
|
case PLUS:
|
case PLUS:
|
{
|
{
|
rtx reg = 0;
|
rtx reg = 0;
|
rtx offset = 0;
|
rtx offset = 0;
|
rtx arg0 = XEXP (addr, 0);
|
rtx arg0 = XEXP (addr, 0);
|
rtx arg1 = XEXP (addr, 1);
|
rtx arg1 = XEXP (addr, 1);
|
|
|
if (GET_CODE (arg0) == REG)
|
if (GET_CODE (arg0) == REG)
|
{
|
{
|
reg = arg0;
|
reg = arg0;
|
offset = arg1;
|
offset = arg1;
|
if (GET_CODE (offset) == REG)
|
if (GET_CODE (offset) == REG)
|
fatal_insn ("PRINT_OPERAND_ADDRESS, 2 regs", addr);
|
fatal_insn ("PRINT_OPERAND_ADDRESS, 2 regs", addr);
|
}
|
}
|
|
|
else if (GET_CODE (arg1) == REG)
|
else if (GET_CODE (arg1) == REG)
|
reg = arg1, offset = arg0;
|
reg = arg1, offset = arg0;
|
else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
|
else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
|
{
|
{
|
fprintf (file, "%s, #", reg_names [GPR_R0]);
|
fprintf (file, "%s, #", reg_names [GPR_R0]);
|
output_addr_const (file, addr);
|
output_addr_const (file, addr);
|
break;
|
break;
|
}
|
}
|
fprintf (file, "%s, #", reg_names [REGNO (reg)]);
|
fprintf (file, "%s, #", reg_names [REGNO (reg)]);
|
output_addr_const (file, offset);
|
output_addr_const (file, offset);
|
break;
|
break;
|
}
|
}
|
|
|
case LABEL_REF:
|
case LABEL_REF:
|
case SYMBOL_REF:
|
case SYMBOL_REF:
|
case CONST_INT:
|
case CONST_INT:
|
case CONST:
|
case CONST:
|
output_addr_const (file, addr);
|
output_addr_const (file, addr);
|
break;
|
break;
|
|
|
default:
|
default:
|
fatal_insn ("PRINT_OPERAND_ADDRESS, invalid insn #1", addr);
|
fatal_insn ("PRINT_OPERAND_ADDRESS, invalid insn #1", addr);
|
break;
|
break;
|
}
|
}
|
}
|
}
|
|
|
/* Implement PRINT_OPERAND_ADDRESS. */
|
/* Implement PRINT_OPERAND_ADDRESS. */
|
void
|
void
|
mt_print_operand_address (FILE * file, rtx addr)
|
mt_print_operand_address (FILE * file, rtx addr)
|
{
|
{
|
if (GET_CODE (addr) == AND
|
if (GET_CODE (addr) == AND
|
&& GET_CODE (XEXP (addr, 1)) == CONST_INT
|
&& GET_CODE (XEXP (addr, 1)) == CONST_INT
|
&& INTVAL (XEXP (addr, 1)) == -3)
|
&& INTVAL (XEXP (addr, 1)) == -3)
|
mt_print_operand_simple_address (file, XEXP (addr, 0));
|
mt_print_operand_simple_address (file, XEXP (addr, 0));
|
else
|
else
|
mt_print_operand_simple_address (file, addr);
|
mt_print_operand_simple_address (file, addr);
|
}
|
}
|
|
|
/* Implement PRINT_OPERAND. */
|
/* Implement PRINT_OPERAND. */
|
void
|
void
|
mt_print_operand (FILE * file, rtx x, int code)
|
mt_print_operand (FILE * file, rtx x, int code)
|
{
|
{
|
switch (code)
|
switch (code)
|
{
|
{
|
case '#':
|
case '#':
|
/* Output a nop if there's nothing for the delay slot. */
|
/* Output a nop if there's nothing for the delay slot. */
|
if (dbr_sequence_length () == 0)
|
if (dbr_sequence_length () == 0)
|
fputs ("\n\tnop", file);
|
fputs ("\n\tnop", file);
|
return;
|
return;
|
|
|
case 'H':
|
case 'H':
|
fprintf(file, "#%%hi16(");
|
fprintf(file, "#%%hi16(");
|
output_addr_const (file, x);
|
output_addr_const (file, x);
|
fprintf(file, ")");
|
fprintf(file, ")");
|
return;
|
return;
|
|
|
case 'L':
|
case 'L':
|
fprintf(file, "#%%lo16(");
|
fprintf(file, "#%%lo16(");
|
output_addr_const (file, x);
|
output_addr_const (file, x);
|
fprintf(file, ")");
|
fprintf(file, ")");
|
return;
|
return;
|
|
|
case 'N':
|
case 'N':
|
fprintf(file, "#%ld", ~INTVAL (x));
|
fprintf(file, "#%ld", ~INTVAL (x));
|
return;
|
return;
|
|
|
case 'z':
|
case 'z':
|
if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0)
|
if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0)
|
{
|
{
|
fputs (reg_names[GPR_R0], file);
|
fputs (reg_names[GPR_R0], file);
|
return;
|
return;
|
}
|
}
|
|
|
case 0:
|
case 0:
|
/* Handled below. */
|
/* Handled below. */
|
break;
|
break;
|
|
|
default:
|
default:
|
/* output_operand_lossage ("mt_print_operand: unknown code"); */
|
/* output_operand_lossage ("mt_print_operand: unknown code"); */
|
fprintf (file, "unknown code");
|
fprintf (file, "unknown code");
|
return;
|
return;
|
}
|
}
|
|
|
switch (GET_CODE (x))
|
switch (GET_CODE (x))
|
{
|
{
|
case REG:
|
case REG:
|
fputs (reg_names [REGNO (x)], file);
|
fputs (reg_names [REGNO (x)], file);
|
break;
|
break;
|
|
|
case CONST:
|
case CONST:
|
case CONST_INT:
|
case CONST_INT:
|
fprintf(file, "#%ld", INTVAL (x));
|
fprintf(file, "#%ld", INTVAL (x));
|
break;
|
break;
|
|
|
case MEM:
|
case MEM:
|
mt_print_operand_address(file, XEXP (x,0));
|
mt_print_operand_address(file, XEXP (x,0));
|
break;
|
break;
|
|
|
case LABEL_REF:
|
case LABEL_REF:
|
case SYMBOL_REF:
|
case SYMBOL_REF:
|
output_addr_const (file, x);
|
output_addr_const (file, x);
|
break;
|
break;
|
|
|
default:
|
default:
|
fprintf(file, "Uknown code: %d", GET_CODE (x));
|
fprintf(file, "Uknown code: %d", GET_CODE (x));
|
break;
|
break;
|
}
|
}
|
|
|
return;
|
return;
|
}
|
}
|
|
|
/* Implement INIT_CUMULATIVE_ARGS. */
|
/* Implement INIT_CUMULATIVE_ARGS. */
|
void
|
void
|
mt_init_cumulative_args (CUMULATIVE_ARGS * cum, tree fntype, rtx libname,
|
mt_init_cumulative_args (CUMULATIVE_ARGS * cum, tree fntype, rtx libname,
|
tree fndecl ATTRIBUTE_UNUSED, int incoming)
|
tree fndecl ATTRIBUTE_UNUSED, int incoming)
|
{
|
{
|
*cum = 0;
|
*cum = 0;
|
|
|
if (TARGET_DEBUG_ARG)
|
if (TARGET_DEBUG_ARG)
|
{
|
{
|
fprintf (stderr, "\nmt_init_cumulative_args:");
|
fprintf (stderr, "\nmt_init_cumulative_args:");
|
|
|
if (incoming)
|
if (incoming)
|
fputs (" incoming", stderr);
|
fputs (" incoming", stderr);
|
|
|
if (fntype)
|
if (fntype)
|
{
|
{
|
tree ret_type = TREE_TYPE (fntype);
|
tree ret_type = TREE_TYPE (fntype);
|
fprintf (stderr, " return = %s,",
|
fprintf (stderr, " return = %s,",
|
tree_code_name[ (int)TREE_CODE (ret_type) ]);
|
tree_code_name[ (int)TREE_CODE (ret_type) ]);
|
}
|
}
|
|
|
if (libname && GET_CODE (libname) == SYMBOL_REF)
|
if (libname && GET_CODE (libname) == SYMBOL_REF)
|
fprintf (stderr, " libname = %s", XSTR (libname, 0));
|
fprintf (stderr, " libname = %s", XSTR (libname, 0));
|
|
|
if (cfun->returns_struct)
|
if (cfun->returns_struct)
|
fprintf (stderr, " return-struct");
|
fprintf (stderr, " return-struct");
|
|
|
putc ('\n', stderr);
|
putc ('\n', stderr);
|
}
|
}
|
}
|
}
|
|
|
/* Compute the slot number to pass an argument in.
|
/* Compute the slot number to pass an argument in.
|
Returns the slot number or -1 if passing on the stack.
|
Returns the slot number or -1 if passing on the stack.
|
|
|
CUM is a variable of type CUMULATIVE_ARGS which gives info about
|
CUM is a variable of type CUMULATIVE_ARGS which gives info about
|
the preceding args and about the function being called.
|
the preceding args and about the function being called.
|
MODE is the argument's machine mode.
|
MODE is the argument's machine mode.
|
TYPE is the data type of the argument (as a tree).
|
TYPE is the data type of the argument (as a tree).
|
This is null for libcalls where that information may
|
This is null for libcalls where that information may
|
not be available.
|
not be available.
|
NAMED is nonzero if this argument is a named parameter
|
NAMED is nonzero if this argument is a named parameter
|
(otherwise it is an extra parameter matching an ellipsis).
|
(otherwise it is an extra parameter matching an ellipsis).
|
INCOMING_P is zero for FUNCTION_ARG, nonzero for FUNCTION_INCOMING_ARG.
|
INCOMING_P is zero for FUNCTION_ARG, nonzero for FUNCTION_INCOMING_ARG.
|
*PREGNO records the register number to use if scalar type. */
|
*PREGNO records the register number to use if scalar type. */
|
|
|
static int
|
static int
|
mt_function_arg_slotno (const CUMULATIVE_ARGS * cum,
|
mt_function_arg_slotno (const CUMULATIVE_ARGS * cum,
|
enum machine_mode mode,
|
enum machine_mode mode,
|
tree type,
|
tree type,
|
int named ATTRIBUTE_UNUSED,
|
int named ATTRIBUTE_UNUSED,
|
int incoming_p ATTRIBUTE_UNUSED,
|
int incoming_p ATTRIBUTE_UNUSED,
|
int * pregno)
|
int * pregno)
|
{
|
{
|
int regbase = FIRST_ARG_REGNUM;
|
int regbase = FIRST_ARG_REGNUM;
|
int slotno = * cum;
|
int slotno = * cum;
|
|
|
if (mode == VOIDmode || targetm.calls.must_pass_in_stack (mode, type))
|
if (mode == VOIDmode || targetm.calls.must_pass_in_stack (mode, type))
|
return -1;
|
return -1;
|
|
|
if (slotno >= MT_NUM_ARG_REGS)
|
if (slotno >= MT_NUM_ARG_REGS)
|
return -1;
|
return -1;
|
|
|
* pregno = regbase + slotno;
|
* pregno = regbase + slotno;
|
|
|
return slotno;
|
return slotno;
|
}
|
}
|
|
|
/* Implement FUNCTION_ARG. */
|
/* Implement FUNCTION_ARG. */
|
rtx
|
rtx
|
mt_function_arg (const CUMULATIVE_ARGS * cum,
|
mt_function_arg (const CUMULATIVE_ARGS * cum,
|
enum machine_mode mode,
|
enum machine_mode mode,
|
tree type,
|
tree type,
|
int named,
|
int named,
|
int incoming_p)
|
int incoming_p)
|
{
|
{
|
int slotno, regno;
|
int slotno, regno;
|
rtx reg;
|
rtx reg;
|
|
|
slotno = mt_function_arg_slotno (cum, mode, type, named, incoming_p, ®no);
|
slotno = mt_function_arg_slotno (cum, mode, type, named, incoming_p, ®no);
|
|
|
if (slotno == -1)
|
if (slotno == -1)
|
reg = NULL_RTX;
|
reg = NULL_RTX;
|
else
|
else
|
reg = gen_rtx_REG (mode, regno);
|
reg = gen_rtx_REG (mode, regno);
|
|
|
return reg;
|
return reg;
|
}
|
}
|
|
|
/* Implement FUNCTION_ARG_ADVANCE. */
|
/* Implement FUNCTION_ARG_ADVANCE. */
|
void
|
void
|
mt_function_arg_advance (CUMULATIVE_ARGS * cum,
|
mt_function_arg_advance (CUMULATIVE_ARGS * cum,
|
enum machine_mode mode,
|
enum machine_mode mode,
|
tree type ATTRIBUTE_UNUSED,
|
tree type ATTRIBUTE_UNUSED,
|
int named)
|
int named)
|
{
|
{
|
int slotno, regno;
|
int slotno, regno;
|
|
|
/* We pass 0 for incoming_p here, it doesn't matter. */
|
/* We pass 0 for incoming_p here, it doesn't matter. */
|
slotno = mt_function_arg_slotno (cum, mode, type, named, 0, ®no);
|
slotno = mt_function_arg_slotno (cum, mode, type, named, 0, ®no);
|
|
|
* cum += (mode != BLKmode
|
* cum += (mode != BLKmode
|
? ROUND_ADVANCE (GET_MODE_SIZE (mode))
|
? ROUND_ADVANCE (GET_MODE_SIZE (mode))
|
: ROUND_ADVANCE (int_size_in_bytes (type)));
|
: ROUND_ADVANCE (int_size_in_bytes (type)));
|
|
|
if (TARGET_DEBUG_ARG)
|
if (TARGET_DEBUG_ARG)
|
fprintf (stderr,
|
fprintf (stderr,
|
"mt_function_arg_advance: words = %2d, mode = %4s, named = %d, size = %3d\n",
|
"mt_function_arg_advance: words = %2d, mode = %4s, named = %d, size = %3d\n",
|
*cum, GET_MODE_NAME (mode), named,
|
*cum, GET_MODE_NAME (mode), named,
|
(*cum) * UNITS_PER_WORD);
|
(*cum) * UNITS_PER_WORD);
|
}
|
}
|
|
|
/* Implement hook TARGET_ARG_PARTIAL_BYTES.
|
/* Implement hook TARGET_ARG_PARTIAL_BYTES.
|
|
|
Returns the number of bytes at the beginning of an argument that
|
Returns the number of bytes at the beginning of an argument that
|
must be put in registers. The value must be zero for arguments
|
must be put in registers. The value must be zero for arguments
|
that are passed entirely in registers or that are entirely pushed
|
that are passed entirely in registers or that are entirely pushed
|
on the stack. */
|
on the stack. */
|
static int
|
static int
|
mt_arg_partial_bytes (CUMULATIVE_ARGS * pcum,
|
mt_arg_partial_bytes (CUMULATIVE_ARGS * pcum,
|
enum machine_mode mode,
|
enum machine_mode mode,
|
tree type,
|
tree type,
|
bool named ATTRIBUTE_UNUSED)
|
bool named ATTRIBUTE_UNUSED)
|
{
|
{
|
int cum = * pcum;
|
int cum = * pcum;
|
int words;
|
int words;
|
|
|
if (mode == BLKmode)
|
if (mode == BLKmode)
|
words = ((int_size_in_bytes (type) + UNITS_PER_WORD - 1)
|
words = ((int_size_in_bytes (type) + UNITS_PER_WORD - 1)
|
/ UNITS_PER_WORD);
|
/ UNITS_PER_WORD);
|
else
|
else
|
words = (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
|
words = (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
|
|
|
if (! targetm.calls.pass_by_reference (&cum, mode, type, named)
|
if (! targetm.calls.pass_by_reference (&cum, mode, type, named)
|
&& cum < MT_NUM_ARG_REGS
|
&& cum < MT_NUM_ARG_REGS
|
&& (cum + words) > MT_NUM_ARG_REGS)
|
&& (cum + words) > MT_NUM_ARG_REGS)
|
{
|
{
|
int bytes = (MT_NUM_ARG_REGS - cum) * UNITS_PER_WORD;
|
int bytes = (MT_NUM_ARG_REGS - cum) * UNITS_PER_WORD;
|
|
|
if (TARGET_DEBUG)
|
if (TARGET_DEBUG)
|
fprintf (stderr, "function_arg_partial_nregs = %d\n", bytes);
|
fprintf (stderr, "function_arg_partial_nregs = %d\n", bytes);
|
return bytes;
|
return bytes;
|
}
|
}
|
|
|
return 0;
|
return 0;
|
}
|
}
|
|
|
|
|
/* Implement TARGET_PASS_BY_REFERENCE hook. */
|
/* Implement TARGET_PASS_BY_REFERENCE hook. */
|
static bool
|
static bool
|
mt_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
|
mt_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
|
enum machine_mode mode ATTRIBUTE_UNUSED,
|
enum machine_mode mode ATTRIBUTE_UNUSED,
|
tree type,
|
tree type,
|
bool named ATTRIBUTE_UNUSED)
|
bool named ATTRIBUTE_UNUSED)
|
{
|
{
|
return (type && int_size_in_bytes (type) > 4 * UNITS_PER_WORD);
|
return (type && int_size_in_bytes (type) > 4 * UNITS_PER_WORD);
|
}
|
}
|
|
|
/* Implement FUNCTION_ARG_BOUNDARY. */
|
/* Implement FUNCTION_ARG_BOUNDARY. */
|
int
|
int
|
mt_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
|
mt_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
|
tree type ATTRIBUTE_UNUSED)
|
tree type ATTRIBUTE_UNUSED)
|
{
|
{
|
return BITS_PER_WORD;
|
return BITS_PER_WORD;
|
}
|
}
|
|
|
/* Implement REG_OK_FOR_BASE_P. */
|
/* Implement REG_OK_FOR_BASE_P. */
|
int
|
int
|
mt_reg_ok_for_base_p (rtx x, int strict)
|
mt_reg_ok_for_base_p (rtx x, int strict)
|
{
|
{
|
if (strict)
|
if (strict)
|
return (((unsigned) REGNO (x)) < FIRST_PSEUDO_REGISTER);
|
return (((unsigned) REGNO (x)) < FIRST_PSEUDO_REGISTER);
|
return 1;
|
return 1;
|
}
|
}
|
|
|
/* Helper function of mt_legitimate_address_p. Return true if XINSN
|
/* Helper function of mt_legitimate_address_p. Return true if XINSN
|
is a simple address, otherwise false. */
|
is a simple address, otherwise false. */
|
static bool
|
static bool
|
mt_legitimate_simple_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
|
mt_legitimate_simple_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
|
rtx xinsn, int strict)
|
rtx xinsn, int strict)
|
{
|
{
|
if (TARGET_DEBUG)
|
if (TARGET_DEBUG)
|
{
|
{
|
fprintf (stderr, "\n========== GO_IF_LEGITIMATE_ADDRESS, %sstrict\n",
|
fprintf (stderr, "\n========== GO_IF_LEGITIMATE_ADDRESS, %sstrict\n",
|
strict ? "" : "not ");
|
strict ? "" : "not ");
|
debug_rtx (xinsn);
|
debug_rtx (xinsn);
|
}
|
}
|
|
|
if (GET_CODE (xinsn) == REG && mt_reg_ok_for_base_p (xinsn, strict))
|
if (GET_CODE (xinsn) == REG && mt_reg_ok_for_base_p (xinsn, strict))
|
return true;
|
return true;
|
|
|
if (GET_CODE (xinsn) == PLUS
|
if (GET_CODE (xinsn) == PLUS
|
&& GET_CODE (XEXP (xinsn, 0)) == REG
|
&& GET_CODE (XEXP (xinsn, 0)) == REG
|
&& mt_reg_ok_for_base_p (XEXP (xinsn, 0), strict)
|
&& mt_reg_ok_for_base_p (XEXP (xinsn, 0), strict)
|
&& GET_CODE (XEXP (xinsn, 1)) == CONST_INT
|
&& GET_CODE (XEXP (xinsn, 1)) == CONST_INT
|
&& SMALL_INT (XEXP (xinsn, 1)))
|
&& SMALL_INT (XEXP (xinsn, 1)))
|
return true;
|
return true;
|
|
|
return false;
|
return false;
|
}
|
}
|
|
|
|
|
/* Helper function of GO_IF_LEGITIMATE_ADDRESS. Return nonzero if
|
/* Helper function of GO_IF_LEGITIMATE_ADDRESS. Return nonzero if
|
XINSN is a legitimate address on MT. */
|
XINSN is a legitimate address on MT. */
|
int
|
int
|
mt_legitimate_address_p (enum machine_mode mode, rtx xinsn, int strict)
|
mt_legitimate_address_p (enum machine_mode mode, rtx xinsn, int strict)
|
{
|
{
|
if (mt_legitimate_simple_address_p (mode, xinsn, strict))
|
if (mt_legitimate_simple_address_p (mode, xinsn, strict))
|
return 1;
|
return 1;
|
|
|
if ((mode) == SImode
|
if ((mode) == SImode
|
&& GET_CODE (xinsn) == AND
|
&& GET_CODE (xinsn) == AND
|
&& GET_CODE (XEXP (xinsn, 1)) == CONST_INT
|
&& GET_CODE (XEXP (xinsn, 1)) == CONST_INT
|
&& INTVAL (XEXP (xinsn, 1)) == -3)
|
&& INTVAL (XEXP (xinsn, 1)) == -3)
|
return mt_legitimate_simple_address_p (mode, XEXP (xinsn, 0), strict);
|
return mt_legitimate_simple_address_p (mode, XEXP (xinsn, 0), strict);
|
else
|
else
|
return 0;
|
return 0;
|
}
|
}
|
|
|
/* Return truth value of whether OP can be used as an operands where a
|
/* Return truth value of whether OP can be used as an operands where a
|
register or 16 bit unsigned integer is needed. */
|
register or 16 bit unsigned integer is needed. */
|
|
|
int
|
int
|
uns_arith_operand (rtx op, enum machine_mode mode)
|
uns_arith_operand (rtx op, enum machine_mode mode)
|
{
|
{
|
if (GET_CODE (op) == CONST_INT && SMALL_INT_UNSIGNED (op))
|
if (GET_CODE (op) == CONST_INT && SMALL_INT_UNSIGNED (op))
|
return 1;
|
return 1;
|
|
|
return register_operand (op, mode);
|
return register_operand (op, mode);
|
}
|
}
|
|
|
/* Return truth value of whether OP can be used as an operands where a
|
/* Return truth value of whether OP can be used as an operands where a
|
16 bit integer is needed. */
|
16 bit integer is needed. */
|
|
|
int
|
int
|
arith_operand (rtx op, enum machine_mode mode)
|
arith_operand (rtx op, enum machine_mode mode)
|
{
|
{
|
if (GET_CODE (op) == CONST_INT && SMALL_INT (op))
|
if (GET_CODE (op) == CONST_INT && SMALL_INT (op))
|
return 1;
|
return 1;
|
|
|
return register_operand (op, mode);
|
return register_operand (op, mode);
|
}
|
}
|
|
|
/* Return truth value of whether OP is a register or the constant 0. */
|
/* Return truth value of whether OP is a register or the constant 0. */
|
|
|
int
|
int
|
reg_or_0_operand (rtx op, enum machine_mode mode)
|
reg_or_0_operand (rtx op, enum machine_mode mode)
|
{
|
{
|
switch (GET_CODE (op))
|
switch (GET_CODE (op))
|
{
|
{
|
case CONST_INT:
|
case CONST_INT:
|
return INTVAL (op) == 0;
|
return INTVAL (op) == 0;
|
|
|
case REG:
|
case REG:
|
case SUBREG:
|
case SUBREG:
|
return register_operand (op, mode);
|
return register_operand (op, mode);
|
|
|
default:
|
default:
|
break;
|
break;
|
}
|
}
|
|
|
return 0;
|
return 0;
|
}
|
}
|
|
|
/* Return truth value of whether OP is a constant that requires two
|
/* Return truth value of whether OP is a constant that requires two
|
loads to put in a register. */
|
loads to put in a register. */
|
|
|
int
|
int
|
big_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
|
big_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
|
{
|
{
|
if (GET_CODE (op) == CONST_INT && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
|
if (GET_CODE (op) == CONST_INT && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
|
return 1;
|
return 1;
|
|
|
return 0;
|
return 0;
|
}
|
}
|
|
|
/* Return truth value of whether OP is a constant that require only
|
/* Return truth value of whether OP is a constant that require only
|
one load to put in a register. */
|
one load to put in a register. */
|
|
|
int
|
int
|
single_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
|
single_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
|
{
|
{
|
if (big_const_operand (op, mode)
|
if (big_const_operand (op, mode)
|
|| GET_CODE (op) == CONST
|
|| GET_CODE (op) == CONST
|
|| GET_CODE (op) == LABEL_REF
|
|| GET_CODE (op) == LABEL_REF
|
|| GET_CODE (op) == SYMBOL_REF)
|
|| GET_CODE (op) == SYMBOL_REF)
|
return 0;
|
return 0;
|
|
|
return 1;
|
return 1;
|
}
|
}
|
|
|
/* True if the current function is an interrupt handler
   (either via #pragma or an attribute specification).  Set by
   mt_expand_prologue and cleared again by mt_expand_epilogue.  */
int interrupt_handler;

/* The processor variant selected by -march= (see mt_override_options,
   which defaults this to PROCESSOR_MS1_16_002).  */
enum processor_type mt_cpu;
|
|
|
static struct machine_function *
|
static struct machine_function *
|
mt_init_machine_status (void)
|
mt_init_machine_status (void)
|
{
|
{
|
struct machine_function *f;
|
struct machine_function *f;
|
|
|
f = ggc_alloc_cleared (sizeof (struct machine_function));
|
f = ggc_alloc_cleared (sizeof (struct machine_function));
|
|
|
return f;
|
return f;
|
}
|
}
|
|
|
/* Implement OVERRIDE_OPTIONS. */
|
/* Implement OVERRIDE_OPTIONS. */
|
void
|
void
|
mt_override_options (void)
|
mt_override_options (void)
|
{
|
{
|
if (mt_cpu_string != NULL)
|
if (mt_cpu_string != NULL)
|
{
|
{
|
if (!strcmp (mt_cpu_string, "ms1-64-001"))
|
if (!strcmp (mt_cpu_string, "ms1-64-001"))
|
mt_cpu = PROCESSOR_MS1_64_001;
|
mt_cpu = PROCESSOR_MS1_64_001;
|
else if (!strcmp (mt_cpu_string, "ms1-16-002"))
|
else if (!strcmp (mt_cpu_string, "ms1-16-002"))
|
mt_cpu = PROCESSOR_MS1_16_002;
|
mt_cpu = PROCESSOR_MS1_16_002;
|
else if (!strcmp (mt_cpu_string, "ms1-16-003"))
|
else if (!strcmp (mt_cpu_string, "ms1-16-003"))
|
mt_cpu = PROCESSOR_MS1_16_003;
|
mt_cpu = PROCESSOR_MS1_16_003;
|
else if (!strcmp (mt_cpu_string, "ms2"))
|
else if (!strcmp (mt_cpu_string, "ms2"))
|
mt_cpu = PROCESSOR_MS2;
|
mt_cpu = PROCESSOR_MS2;
|
else
|
else
|
error ("bad value (%s) for -march= switch", mt_cpu_string);
|
error ("bad value (%s) for -march= switch", mt_cpu_string);
|
}
|
}
|
else
|
else
|
mt_cpu = PROCESSOR_MS1_16_002;
|
mt_cpu = PROCESSOR_MS1_16_002;
|
|
|
if (flag_exceptions)
|
if (flag_exceptions)
|
{
|
{
|
flag_omit_frame_pointer = 0;
|
flag_omit_frame_pointer = 0;
|
flag_gcse = 0;
|
flag_gcse = 0;
|
}
|
}
|
|
|
/* We do delayed branch filling in machine dependent reorg */
|
/* We do delayed branch filling in machine dependent reorg */
|
mt_flag_delayed_branch = flag_delayed_branch;
|
mt_flag_delayed_branch = flag_delayed_branch;
|
flag_delayed_branch = 0;
|
flag_delayed_branch = 0;
|
|
|
init_machine_status = mt_init_machine_status;
|
init_machine_status = mt_init_machine_status;
|
}
|
}
|
|
|
/* Do what is necessary for `va_start'. We look at the current function
|
/* Do what is necessary for `va_start'. We look at the current function
|
to determine if stdarg or varargs is used and return the address of the
|
to determine if stdarg or varargs is used and return the address of the
|
first unnamed parameter. */
|
first unnamed parameter. */
|
|
|
static void
|
static void
|
mt_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
|
mt_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
|
enum machine_mode mode ATTRIBUTE_UNUSED,
|
enum machine_mode mode ATTRIBUTE_UNUSED,
|
tree type ATTRIBUTE_UNUSED,
|
tree type ATTRIBUTE_UNUSED,
|
int *pretend_size, int no_rtl)
|
int *pretend_size, int no_rtl)
|
{
|
{
|
int regno;
|
int regno;
|
int regs = MT_NUM_ARG_REGS - *cum;
|
int regs = MT_NUM_ARG_REGS - *cum;
|
|
|
*pretend_size = regs < 0 ? 0 : GET_MODE_SIZE (SImode) * regs;
|
*pretend_size = regs < 0 ? 0 : GET_MODE_SIZE (SImode) * regs;
|
|
|
if (no_rtl)
|
if (no_rtl)
|
return;
|
return;
|
|
|
for (regno = *cum; regno < MT_NUM_ARG_REGS; regno++)
|
for (regno = *cum; regno < MT_NUM_ARG_REGS; regno++)
|
{
|
{
|
rtx reg = gen_rtx_REG (SImode, FIRST_ARG_REGNUM + regno);
|
rtx reg = gen_rtx_REG (SImode, FIRST_ARG_REGNUM + regno);
|
rtx slot = gen_rtx_PLUS (Pmode,
|
rtx slot = gen_rtx_PLUS (Pmode,
|
gen_rtx_REG (SImode, ARG_POINTER_REGNUM),
|
gen_rtx_REG (SImode, ARG_POINTER_REGNUM),
|
GEN_INT (UNITS_PER_WORD * regno));
|
GEN_INT (UNITS_PER_WORD * regno));
|
|
|
emit_move_insn (gen_rtx_MEM (SImode, slot), reg);
|
emit_move_insn (gen_rtx_MEM (SImode, slot), reg);
|
}
|
}
|
}
|
}
|
|
|
/* Returns the number of bytes offset between the frame pointer and the stack
|
/* Returns the number of bytes offset between the frame pointer and the stack
|
pointer for the current function. SIZE is the number of bytes of space
|
pointer for the current function. SIZE is the number of bytes of space
|
needed for local variables. */
|
needed for local variables. */
|
|
|
unsigned int
|
unsigned int
|
mt_compute_frame_size (int size)
|
mt_compute_frame_size (int size)
|
{
|
{
|
int regno;
|
int regno;
|
unsigned int total_size;
|
unsigned int total_size;
|
unsigned int var_size;
|
unsigned int var_size;
|
unsigned int args_size;
|
unsigned int args_size;
|
unsigned int pretend_size;
|
unsigned int pretend_size;
|
unsigned int extra_size;
|
unsigned int extra_size;
|
unsigned int reg_size;
|
unsigned int reg_size;
|
unsigned int frame_size;
|
unsigned int frame_size;
|
unsigned int reg_mask;
|
unsigned int reg_mask;
|
|
|
var_size = size;
|
var_size = size;
|
args_size = current_function_outgoing_args_size;
|
args_size = current_function_outgoing_args_size;
|
pretend_size = current_function_pretend_args_size;
|
pretend_size = current_function_pretend_args_size;
|
extra_size = FIRST_PARM_OFFSET (0);
|
extra_size = FIRST_PARM_OFFSET (0);
|
total_size = extra_size + pretend_size + args_size + var_size;
|
total_size = extra_size + pretend_size + args_size + var_size;
|
reg_size = 0;
|
reg_size = 0;
|
reg_mask = 0;
|
reg_mask = 0;
|
|
|
/* Calculate space needed for registers. */
|
/* Calculate space needed for registers. */
|
for (regno = GPR_R0; regno <= GPR_LAST; regno++)
|
for (regno = GPR_R0; regno <= GPR_LAST; regno++)
|
{
|
{
|
if (MUST_SAVE_REGISTER (regno))
|
if (MUST_SAVE_REGISTER (regno))
|
{
|
{
|
reg_size += UNITS_PER_WORD;
|
reg_size += UNITS_PER_WORD;
|
reg_mask |= 1 << regno;
|
reg_mask |= 1 << regno;
|
}
|
}
|
}
|
}
|
|
|
current_frame_info.save_fp = (regs_ever_live [GPR_FP]
|
current_frame_info.save_fp = (regs_ever_live [GPR_FP]
|
|| frame_pointer_needed
|
|| frame_pointer_needed
|
|| interrupt_handler);
|
|| interrupt_handler);
|
current_frame_info.save_lr = (regs_ever_live [GPR_LINK]
|
current_frame_info.save_lr = (regs_ever_live [GPR_LINK]
|
|| profile_flag
|
|| profile_flag
|
|| interrupt_handler);
|
|| interrupt_handler);
|
|
|
reg_size += (current_frame_info.save_fp + current_frame_info.save_lr)
|
reg_size += (current_frame_info.save_fp + current_frame_info.save_lr)
|
* UNITS_PER_WORD;
|
* UNITS_PER_WORD;
|
total_size += reg_size;
|
total_size += reg_size;
|
total_size = ((total_size + 3) & ~3);
|
total_size = ((total_size + 3) & ~3);
|
|
|
frame_size = total_size;
|
frame_size = total_size;
|
|
|
/* Save computed information. */
|
/* Save computed information. */
|
current_frame_info.pretend_size = pretend_size;
|
current_frame_info.pretend_size = pretend_size;
|
current_frame_info.var_size = var_size;
|
current_frame_info.var_size = var_size;
|
current_frame_info.args_size = args_size;
|
current_frame_info.args_size = args_size;
|
current_frame_info.reg_size = reg_size;
|
current_frame_info.reg_size = reg_size;
|
current_frame_info.frame_size = args_size + var_size;
|
current_frame_info.frame_size = args_size + var_size;
|
current_frame_info.total_size = total_size;
|
current_frame_info.total_size = total_size;
|
current_frame_info.extra_size = extra_size;
|
current_frame_info.extra_size = extra_size;
|
current_frame_info.reg_mask = reg_mask;
|
current_frame_info.reg_mask = reg_mask;
|
current_frame_info.initialized = reload_completed;
|
current_frame_info.initialized = reload_completed;
|
|
|
return total_size;
|
return total_size;
|
}
|
}
|
|
|
/* Emit code to save REG in stack offset pointed to by MEM.
|
/* Emit code to save REG in stack offset pointed to by MEM.
|
STACK_OFFSET is the offset from the SP where the save will happen.
|
STACK_OFFSET is the offset from the SP where the save will happen.
|
This function sets the REG_FRAME_RELATED_EXPR note accordingly. */
|
This function sets the REG_FRAME_RELATED_EXPR note accordingly. */
|
static void
|
static void
|
mt_emit_save_restore (enum save_direction direction,
|
mt_emit_save_restore (enum save_direction direction,
|
rtx reg, rtx mem, int stack_offset)
|
rtx reg, rtx mem, int stack_offset)
|
{
|
{
|
if (direction == FROM_PROCESSOR_TO_MEM)
|
if (direction == FROM_PROCESSOR_TO_MEM)
|
{
|
{
|
rtx insn;
|
rtx insn;
|
|
|
insn = emit_move_insn (mem, reg);
|
insn = emit_move_insn (mem, reg);
|
RTX_FRAME_RELATED_P (insn) = 1;
|
RTX_FRAME_RELATED_P (insn) = 1;
|
REG_NOTES (insn)
|
REG_NOTES (insn)
|
= gen_rtx_EXPR_LIST
|
= gen_rtx_EXPR_LIST
|
(REG_FRAME_RELATED_EXPR,
|
(REG_FRAME_RELATED_EXPR,
|
gen_rtx_SET (VOIDmode,
|
gen_rtx_SET (VOIDmode,
|
gen_rtx_MEM (SImode,
|
gen_rtx_MEM (SImode,
|
gen_rtx_PLUS (SImode,
|
gen_rtx_PLUS (SImode,
|
stack_pointer_rtx,
|
stack_pointer_rtx,
|
GEN_INT (stack_offset))),
|
GEN_INT (stack_offset))),
|
reg),
|
reg),
|
REG_NOTES (insn));
|
REG_NOTES (insn));
|
}
|
}
|
else
|
else
|
emit_move_insn (reg, mem);
|
emit_move_insn (reg, mem);
|
}
|
}
|
|
|
|
|
/* Emit code to save the frame pointer in the prologue and restore
|
/* Emit code to save the frame pointer in the prologue and restore
|
frame pointer in epilogue. */
|
frame pointer in epilogue. */
|
|
|
static void
|
static void
|
mt_emit_save_fp (enum save_direction direction,
|
mt_emit_save_fp (enum save_direction direction,
|
struct mt_frame_info info)
|
struct mt_frame_info info)
|
{
|
{
|
rtx base_reg;
|
rtx base_reg;
|
int reg_mask = info.reg_mask & ~(FP_MASK | LINK_MASK);
|
int reg_mask = info.reg_mask & ~(FP_MASK | LINK_MASK);
|
int offset = info.total_size;
|
int offset = info.total_size;
|
int stack_offset = info.total_size;
|
int stack_offset = info.total_size;
|
|
|
/* If there is nothing to save, get out now. */
|
/* If there is nothing to save, get out now. */
|
if (! info.save_fp && ! info.save_lr && ! reg_mask)
|
if (! info.save_fp && ! info.save_lr && ! reg_mask)
|
return;
|
return;
|
|
|
/* If offset doesn't fit in a 15-bit signed integer,
|
/* If offset doesn't fit in a 15-bit signed integer,
|
uses a scratch registers to get a smaller offset. */
|
uses a scratch registers to get a smaller offset. */
|
if (CONST_OK_FOR_LETTER_P(offset, 'O'))
|
if (CONST_OK_FOR_LETTER_P(offset, 'O'))
|
base_reg = stack_pointer_rtx;
|
base_reg = stack_pointer_rtx;
|
else
|
else
|
{
|
{
|
/* Use the scratch register R9 that holds old stack pointer. */
|
/* Use the scratch register R9 that holds old stack pointer. */
|
base_reg = gen_rtx_REG (SImode, GPR_R9);
|
base_reg = gen_rtx_REG (SImode, GPR_R9);
|
offset = 0;
|
offset = 0;
|
}
|
}
|
|
|
if (info.save_fp)
|
if (info.save_fp)
|
{
|
{
|
offset -= UNITS_PER_WORD;
|
offset -= UNITS_PER_WORD;
|
stack_offset -= UNITS_PER_WORD;
|
stack_offset -= UNITS_PER_WORD;
|
mt_emit_save_restore
|
mt_emit_save_restore
|
(direction, gen_rtx_REG (SImode, GPR_FP),
|
(direction, gen_rtx_REG (SImode, GPR_FP),
|
gen_rtx_MEM (SImode,
|
gen_rtx_MEM (SImode,
|
gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
|
gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
|
stack_offset);
|
stack_offset);
|
}
|
}
|
}
|
}
|
|
|
/* Emit code to save registers in the prologue and restore register
|
/* Emit code to save registers in the prologue and restore register
|
in epilogue. */
|
in epilogue. */
|
|
|
static void
|
static void
|
mt_emit_save_regs (enum save_direction direction,
|
mt_emit_save_regs (enum save_direction direction,
|
struct mt_frame_info info)
|
struct mt_frame_info info)
|
{
|
{
|
rtx base_reg;
|
rtx base_reg;
|
int regno;
|
int regno;
|
int reg_mask = info.reg_mask & ~(FP_MASK | LINK_MASK);
|
int reg_mask = info.reg_mask & ~(FP_MASK | LINK_MASK);
|
int offset = info.total_size;
|
int offset = info.total_size;
|
int stack_offset = info.total_size;
|
int stack_offset = info.total_size;
|
|
|
/* If there is nothing to save, get out now. */
|
/* If there is nothing to save, get out now. */
|
if (! info.save_fp && ! info.save_lr && ! reg_mask)
|
if (! info.save_fp && ! info.save_lr && ! reg_mask)
|
return;
|
return;
|
|
|
/* If offset doesn't fit in a 15-bit signed integer,
|
/* If offset doesn't fit in a 15-bit signed integer,
|
uses a scratch registers to get a smaller offset. */
|
uses a scratch registers to get a smaller offset. */
|
if (CONST_OK_FOR_LETTER_P(offset, 'O'))
|
if (CONST_OK_FOR_LETTER_P(offset, 'O'))
|
base_reg = stack_pointer_rtx;
|
base_reg = stack_pointer_rtx;
|
else
|
else
|
{
|
{
|
/* Use the scratch register R9 that holds old stack pointer. */
|
/* Use the scratch register R9 that holds old stack pointer. */
|
base_reg = gen_rtx_REG (SImode, GPR_R9);
|
base_reg = gen_rtx_REG (SImode, GPR_R9);
|
offset = 0;
|
offset = 0;
|
}
|
}
|
|
|
if (info.save_fp)
|
if (info.save_fp)
|
{
|
{
|
/* This just records the space for it, the actual move generated in
|
/* This just records the space for it, the actual move generated in
|
mt_emit_save_fp (). */
|
mt_emit_save_fp (). */
|
offset -= UNITS_PER_WORD;
|
offset -= UNITS_PER_WORD;
|
stack_offset -= UNITS_PER_WORD;
|
stack_offset -= UNITS_PER_WORD;
|
}
|
}
|
|
|
if (info.save_lr)
|
if (info.save_lr)
|
{
|
{
|
offset -= UNITS_PER_WORD;
|
offset -= UNITS_PER_WORD;
|
stack_offset -= UNITS_PER_WORD;
|
stack_offset -= UNITS_PER_WORD;
|
mt_emit_save_restore
|
mt_emit_save_restore
|
(direction, gen_rtx_REG (SImode, GPR_LINK),
|
(direction, gen_rtx_REG (SImode, GPR_LINK),
|
gen_rtx_MEM (SImode,
|
gen_rtx_MEM (SImode,
|
gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
|
gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
|
stack_offset);
|
stack_offset);
|
}
|
}
|
|
|
/* Save any needed call-saved regs. */
|
/* Save any needed call-saved regs. */
|
for (regno = GPR_R0; regno <= GPR_LAST; regno++)
|
for (regno = GPR_R0; regno <= GPR_LAST; regno++)
|
{
|
{
|
if ((reg_mask & (1 << regno)) != 0)
|
if ((reg_mask & (1 << regno)) != 0)
|
{
|
{
|
offset -= UNITS_PER_WORD;
|
offset -= UNITS_PER_WORD;
|
stack_offset -= UNITS_PER_WORD;
|
stack_offset -= UNITS_PER_WORD;
|
mt_emit_save_restore
|
mt_emit_save_restore
|
(direction, gen_rtx_REG (SImode, regno),
|
(direction, gen_rtx_REG (SImode, regno),
|
gen_rtx_MEM (SImode,
|
gen_rtx_MEM (SImode,
|
gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
|
gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
|
stack_offset);
|
stack_offset);
|
}
|
}
|
}
|
}
|
}
|
}
|
|
|
/* Return true if FUNC is a function with the 'interrupt' attribute. */
|
/* Return true if FUNC is a function with the 'interrupt' attribute. */
|
static bool
|
static bool
|
mt_interrupt_function_p (tree func)
|
mt_interrupt_function_p (tree func)
|
{
|
{
|
tree a;
|
tree a;
|
|
|
if (TREE_CODE (func) != FUNCTION_DECL)
|
if (TREE_CODE (func) != FUNCTION_DECL)
|
return false;
|
return false;
|
|
|
a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
|
a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
|
return a != NULL_TREE;
|
return a != NULL_TREE;
|
}
|
}
|
|
|
/* Generate prologue code. */
|
/* Generate prologue code. */
|
void
|
void
|
mt_expand_prologue (void)
|
mt_expand_prologue (void)
|
{
|
{
|
rtx size_rtx, insn;
|
rtx size_rtx, insn;
|
unsigned int frame_size;
|
unsigned int frame_size;
|
|
|
if (mt_interrupt_function_p (current_function_decl))
|
if (mt_interrupt_function_p (current_function_decl))
|
{
|
{
|
interrupt_handler = 1;
|
interrupt_handler = 1;
|
if (cfun->machine)
|
if (cfun->machine)
|
cfun->machine->interrupt_handler = 1;
|
cfun->machine->interrupt_handler = 1;
|
}
|
}
|
|
|
mt_compute_frame_size (get_frame_size ());
|
mt_compute_frame_size (get_frame_size ());
|
|
|
if (TARGET_DEBUG_STACK)
|
if (TARGET_DEBUG_STACK)
|
mt_debug_stack (¤t_frame_info);
|
mt_debug_stack (¤t_frame_info);
|
|
|
/* Compute size of stack adjustment. */
|
/* Compute size of stack adjustment. */
|
frame_size = current_frame_info.total_size;
|
frame_size = current_frame_info.total_size;
|
|
|
/* If offset doesn't fit in a 15-bit signed integer,
|
/* If offset doesn't fit in a 15-bit signed integer,
|
uses a scratch registers to get a smaller offset. */
|
uses a scratch registers to get a smaller offset. */
|
if (CONST_OK_FOR_LETTER_P(frame_size, 'O'))
|
if (CONST_OK_FOR_LETTER_P(frame_size, 'O'))
|
size_rtx = GEN_INT (frame_size);
|
size_rtx = GEN_INT (frame_size);
|
else
|
else
|
{
|
{
|
/* We do not have any scratch registers. */
|
/* We do not have any scratch registers. */
|
gcc_assert (!interrupt_handler);
|
gcc_assert (!interrupt_handler);
|
|
|
size_rtx = gen_rtx_REG (SImode, GPR_R9);
|
size_rtx = gen_rtx_REG (SImode, GPR_R9);
|
insn = emit_move_insn (size_rtx, GEN_INT (frame_size & 0xffff0000));
|
insn = emit_move_insn (size_rtx, GEN_INT (frame_size & 0xffff0000));
|
insn = emit_insn (gen_iorsi3 (size_rtx, size_rtx,
|
insn = emit_insn (gen_iorsi3 (size_rtx, size_rtx,
|
GEN_INT (frame_size & 0x0000ffff)));
|
GEN_INT (frame_size & 0x0000ffff)));
|
}
|
}
|
|
|
/* Allocate stack for this frame. */
|
/* Allocate stack for this frame. */
|
/* Make stack adjustment and use scratch register if constant too
|
/* Make stack adjustment and use scratch register if constant too
|
large to fit as immediate. */
|
large to fit as immediate. */
|
if (frame_size)
|
if (frame_size)
|
{
|
{
|
insn = emit_insn (gen_subsi3 (stack_pointer_rtx,
|
insn = emit_insn (gen_subsi3 (stack_pointer_rtx,
|
stack_pointer_rtx,
|
stack_pointer_rtx,
|
size_rtx));
|
size_rtx));
|
RTX_FRAME_RELATED_P (insn) = 1;
|
RTX_FRAME_RELATED_P (insn) = 1;
|
REG_NOTES (insn)
|
REG_NOTES (insn)
|
= gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
|
= gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
|
gen_rtx_SET (VOIDmode,
|
gen_rtx_SET (VOIDmode,
|
stack_pointer_rtx,
|
stack_pointer_rtx,
|
gen_rtx_MINUS (SImode,
|
gen_rtx_MINUS (SImode,
|
stack_pointer_rtx,
|
stack_pointer_rtx,
|
GEN_INT (frame_size))),
|
GEN_INT (frame_size))),
|
REG_NOTES (insn));
|
REG_NOTES (insn));
|
}
|
}
|
|
|
/* Set R9 to point to old sp if required for access to register save
|
/* Set R9 to point to old sp if required for access to register save
|
area. */
|
area. */
|
if ( current_frame_info.reg_size != 0
|
if ( current_frame_info.reg_size != 0
|
&& !CONST_OK_FOR_LETTER_P (frame_size, 'O'))
|
&& !CONST_OK_FOR_LETTER_P (frame_size, 'O'))
|
emit_insn (gen_addsi3 (size_rtx, size_rtx, stack_pointer_rtx));
|
emit_insn (gen_addsi3 (size_rtx, size_rtx, stack_pointer_rtx));
|
|
|
/* Save the frame pointer. */
|
/* Save the frame pointer. */
|
mt_emit_save_fp (FROM_PROCESSOR_TO_MEM, current_frame_info);
|
mt_emit_save_fp (FROM_PROCESSOR_TO_MEM, current_frame_info);
|
|
|
/* Now put the frame pointer into the frame pointer register. */
|
/* Now put the frame pointer into the frame pointer register. */
|
if (frame_pointer_needed)
|
if (frame_pointer_needed)
|
{
|
{
|
insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
|
insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
|
RTX_FRAME_RELATED_P (insn) = 1;
|
RTX_FRAME_RELATED_P (insn) = 1;
|
}
|
}
|
|
|
/* Save the registers. */
|
/* Save the registers. */
|
mt_emit_save_regs (FROM_PROCESSOR_TO_MEM, current_frame_info);
|
mt_emit_save_regs (FROM_PROCESSOR_TO_MEM, current_frame_info);
|
|
|
/* If we are profiling, make sure no instructions are scheduled before
|
/* If we are profiling, make sure no instructions are scheduled before
|
the call to mcount. */
|
the call to mcount. */
|
if (profile_flag)
|
if (profile_flag)
|
emit_insn (gen_blockage ());
|
emit_insn (gen_blockage ());
|
}
|
}
|
|
|
/* Implement EPILOGUE_USES. */
|
/* Implement EPILOGUE_USES. */
|
int
|
int
|
mt_epilogue_uses (int regno)
|
mt_epilogue_uses (int regno)
|
{
|
{
|
if (cfun->machine && cfun->machine->interrupt_handler && reload_completed)
|
if (cfun->machine && cfun->machine->interrupt_handler && reload_completed)
|
return 1;
|
return 1;
|
return regno == GPR_LINK;
|
return regno == GPR_LINK;
|
}
|
}
|
|
|
/* Generate epilogue. EH_MODE is NORMAL_EPILOGUE when generating a
|
/* Generate epilogue. EH_MODE is NORMAL_EPILOGUE when generating a
|
function epilogue, or EH_EPILOGUE when generating an EH
|
function epilogue, or EH_EPILOGUE when generating an EH
|
epilogue. */
|
epilogue. */
|
void
|
void
|
mt_expand_epilogue (enum epilogue_type eh_mode)
|
mt_expand_epilogue (enum epilogue_type eh_mode)
|
{
|
{
|
rtx size_rtx, insn;
|
rtx size_rtx, insn;
|
unsigned frame_size;
|
unsigned frame_size;
|
|
|
mt_compute_frame_size (get_frame_size ());
|
mt_compute_frame_size (get_frame_size ());
|
|
|
if (TARGET_DEBUG_STACK)
|
if (TARGET_DEBUG_STACK)
|
mt_debug_stack (& current_frame_info);
|
mt_debug_stack (& current_frame_info);
|
|
|
/* Compute size of stack adjustment. */
|
/* Compute size of stack adjustment. */
|
frame_size = current_frame_info.total_size;
|
frame_size = current_frame_info.total_size;
|
|
|
/* If offset doesn't fit in a 15-bit signed integer,
|
/* If offset doesn't fit in a 15-bit signed integer,
|
uses a scratch registers to get a smaller offset. */
|
uses a scratch registers to get a smaller offset. */
|
if (CONST_OK_FOR_LETTER_P(frame_size, 'O'))
|
if (CONST_OK_FOR_LETTER_P(frame_size, 'O'))
|
size_rtx = GEN_INT (frame_size);
|
size_rtx = GEN_INT (frame_size);
|
else
|
else
|
{
|
{
|
/* We do not have any scratch registers. */
|
/* We do not have any scratch registers. */
|
gcc_assert (!interrupt_handler);
|
gcc_assert (!interrupt_handler);
|
|
|
size_rtx = gen_rtx_REG (SImode, GPR_R9);
|
size_rtx = gen_rtx_REG (SImode, GPR_R9);
|
insn = emit_move_insn (size_rtx, GEN_INT (frame_size & 0xffff0000));
|
insn = emit_move_insn (size_rtx, GEN_INT (frame_size & 0xffff0000));
|
insn = emit_insn (gen_iorsi3 (size_rtx, size_rtx,
|
insn = emit_insn (gen_iorsi3 (size_rtx, size_rtx,
|
GEN_INT (frame_size & 0x0000ffff)));
|
GEN_INT (frame_size & 0x0000ffff)));
|
/* Set R9 to point to old sp if required for access to register
|
/* Set R9 to point to old sp if required for access to register
|
save area. */
|
save area. */
|
emit_insn (gen_addsi3 (size_rtx, size_rtx, stack_pointer_rtx));
|
emit_insn (gen_addsi3 (size_rtx, size_rtx, stack_pointer_rtx));
|
}
|
}
|
|
|
/* Restore sp if there was some possible change to it. */
|
/* Restore sp if there was some possible change to it. */
|
if (frame_pointer_needed)
|
if (frame_pointer_needed)
|
insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
|
insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
|
|
|
/* Restore the registers. */
|
/* Restore the registers. */
|
mt_emit_save_fp (FROM_MEM_TO_PROCESSOR, current_frame_info);
|
mt_emit_save_fp (FROM_MEM_TO_PROCESSOR, current_frame_info);
|
mt_emit_save_regs (FROM_MEM_TO_PROCESSOR, current_frame_info);
|
mt_emit_save_regs (FROM_MEM_TO_PROCESSOR, current_frame_info);
|
|
|
/* Make stack adjustment and use scratch register if constant too
|
/* Make stack adjustment and use scratch register if constant too
|
large to fit as immediate. */
|
large to fit as immediate. */
|
if (frame_size)
|
if (frame_size)
|
{
|
{
|
if (CONST_OK_FOR_LETTER_P(frame_size, 'O'))
|
if (CONST_OK_FOR_LETTER_P(frame_size, 'O'))
|
/* Can handle this with simple add. */
|
/* Can handle this with simple add. */
|
insn = emit_insn (gen_addsi3 (stack_pointer_rtx,
|
insn = emit_insn (gen_addsi3 (stack_pointer_rtx,
|
stack_pointer_rtx,
|
stack_pointer_rtx,
|
size_rtx));
|
size_rtx));
|
else
|
else
|
/* Scratch reg R9 has the old sp value. */
|
/* Scratch reg R9 has the old sp value. */
|
insn = emit_move_insn (stack_pointer_rtx,
|
insn = emit_move_insn (stack_pointer_rtx,
|
gen_rtx_REG (SImode, GPR_R9));
|
gen_rtx_REG (SImode, GPR_R9));
|
|
|
REG_NOTES (insn)
|
REG_NOTES (insn)
|
= gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
|
= gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
|
gen_rtx_SET (VOIDmode,
|
gen_rtx_SET (VOIDmode,
|
stack_pointer_rtx,
|
stack_pointer_rtx,
|
gen_rtx_PLUS (SImode,
|
gen_rtx_PLUS (SImode,
|
stack_pointer_rtx,
|
stack_pointer_rtx,
|
GEN_INT (frame_size))),
|
GEN_INT (frame_size))),
|
REG_NOTES (insn));
|
REG_NOTES (insn));
|
}
|
}
|
|
|
if (cfun->machine && cfun->machine->eh_stack_adjust != NULL_RTX)
|
if (cfun->machine && cfun->machine->eh_stack_adjust != NULL_RTX)
|
/* Perform the additional bump for __throw. */
|
/* Perform the additional bump for __throw. */
|
emit_insn (gen_addsi3 (stack_pointer_rtx,
|
emit_insn (gen_addsi3 (stack_pointer_rtx,
|
stack_pointer_rtx,
|
stack_pointer_rtx,
|
cfun->machine->eh_stack_adjust));
|
cfun->machine->eh_stack_adjust));
|
|
|
/* Generate the appropriate return. */
|
/* Generate the appropriate return. */
|
if (eh_mode == EH_EPILOGUE)
|
if (eh_mode == EH_EPILOGUE)
|
{
|
{
|
emit_jump_insn (gen_eh_return_internal ());
|
emit_jump_insn (gen_eh_return_internal ());
|
emit_barrier ();
|
emit_barrier ();
|
}
|
}
|
else if (interrupt_handler)
|
else if (interrupt_handler)
|
emit_jump_insn (gen_return_interrupt_internal ());
|
emit_jump_insn (gen_return_interrupt_internal ());
|
else
|
else
|
emit_jump_insn (gen_return_internal ());
|
emit_jump_insn (gen_return_internal ());
|
|
|
/* Reset state info for each function. */
|
/* Reset state info for each function. */
|
interrupt_handler = 0;
|
interrupt_handler = 0;
|
current_frame_info = zero_frame_info;
|
current_frame_info = zero_frame_info;
|
if (cfun->machine)
|
if (cfun->machine)
|
cfun->machine->eh_stack_adjust = NULL_RTX;
|
cfun->machine->eh_stack_adjust = NULL_RTX;
|
}
|
}
|
|
|
|
|
/* Generate code for the "eh_return" pattern. */
|
/* Generate code for the "eh_return" pattern. */
|
void
|
void
|
mt_expand_eh_return (rtx * operands)
|
mt_expand_eh_return (rtx * operands)
|
{
|
{
|
if (GET_CODE (operands[0]) != REG
|
if (GET_CODE (operands[0]) != REG
|
|| REGNO (operands[0]) != EH_RETURN_STACKADJ_REGNO)
|
|| REGNO (operands[0]) != EH_RETURN_STACKADJ_REGNO)
|
{
|
{
|
rtx sp = EH_RETURN_STACKADJ_RTX;
|
rtx sp = EH_RETURN_STACKADJ_RTX;
|
|
|
emit_move_insn (sp, operands[0]);
|
emit_move_insn (sp, operands[0]);
|
operands[0] = sp;
|
operands[0] = sp;
|
}
|
}
|
|
|
emit_insn (gen_eh_epilogue (operands[0]));
|
emit_insn (gen_eh_epilogue (operands[0]));
|
}
|
}
|
|
|
/* Generate code for the "eh_epilogue" pattern. */
|
/* Generate code for the "eh_epilogue" pattern. */
|
void
|
void
|
mt_emit_eh_epilogue (rtx * operands ATTRIBUTE_UNUSED)
|
mt_emit_eh_epilogue (rtx * operands ATTRIBUTE_UNUSED)
|
{
|
{
|
cfun->machine->eh_stack_adjust = EH_RETURN_STACKADJ_RTX; /* operands[0]; */
|
cfun->machine->eh_stack_adjust = EH_RETURN_STACKADJ_RTX; /* operands[0]; */
|
mt_expand_epilogue (EH_EPILOGUE);
|
mt_expand_epilogue (EH_EPILOGUE);
|
}
|
}
|
|
|
/* Handle an "interrupt" attribute. */
|
/* Handle an "interrupt" attribute. */
|
static tree
|
static tree
|
mt_handle_interrupt_attribute (tree * node,
|
mt_handle_interrupt_attribute (tree * node,
|
tree name,
|
tree name,
|
tree args ATTRIBUTE_UNUSED,
|
tree args ATTRIBUTE_UNUSED,
|
int flags ATTRIBUTE_UNUSED,
|
int flags ATTRIBUTE_UNUSED,
|
bool * no_add_attrs)
|
bool * no_add_attrs)
|
{
|
{
|
if (TREE_CODE (*node) != FUNCTION_DECL)
|
if (TREE_CODE (*node) != FUNCTION_DECL)
|
{
|
{
|
warning (OPT_Wattributes,
|
warning (OPT_Wattributes,
|
"%qs attribute only applies to functions",
|
"%qs attribute only applies to functions",
|
IDENTIFIER_POINTER (name));
|
IDENTIFIER_POINTER (name));
|
*no_add_attrs = true;
|
*no_add_attrs = true;
|
}
|
}
|
|
|
return NULL_TREE;
|
return NULL_TREE;
|
}
|
}
|
|
|
/* Table of machine attributes. */
|
/* Table of machine attributes. */
|
const struct attribute_spec mt_attribute_table[] =
|
const struct attribute_spec mt_attribute_table[] =
|
{
|
{
|
/* name, min, max, decl?, type?, func?, handler */
|
/* name, min, max, decl?, type?, func?, handler */
|
{ "interrupt", 0, 0, false, false, false, mt_handle_interrupt_attribute },
|
{ "interrupt", 0, 0, false, false, false, mt_handle_interrupt_attribute },
|
{ NULL, 0, 0, false, false, false, NULL }
|
{ NULL, 0, 0, false, false, false, NULL }
|
};
|
};
|
|
|
/* Implement INITIAL_ELIMINATION_OFFSET. */
|
/* Implement INITIAL_ELIMINATION_OFFSET. */
|
int
|
int
|
mt_initial_elimination_offset (int from, int to)
|
mt_initial_elimination_offset (int from, int to)
|
{
|
{
|
mt_compute_frame_size (get_frame_size ());
|
mt_compute_frame_size (get_frame_size ());
|
|
|
if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
|
if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
|
return 0;
|
return 0;
|
|
|
else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
|
else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
|
return current_frame_info.total_size;
|
return current_frame_info.total_size;
|
|
|
else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
|
else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
|
return current_frame_info.total_size;
|
return current_frame_info.total_size;
|
|
|
else
|
else
|
gcc_unreachable ();
|
gcc_unreachable ();
|
}
|
}
|
|
|
/* Generate a compare for CODE. Return a brand-new rtx that
|
/* Generate a compare for CODE. Return a brand-new rtx that
|
represents the result of the compare. */
|
represents the result of the compare. */
|
|
|
static rtx
|
static rtx
|
mt_generate_compare (enum rtx_code code, rtx op0, rtx op1)
|
mt_generate_compare (enum rtx_code code, rtx op0, rtx op1)
|
{
|
{
|
rtx scratch0, scratch1, const_scratch;
|
rtx scratch0, scratch1, const_scratch;
|
|
|
switch (code)
|
switch (code)
|
{
|
{
|
case GTU:
|
case GTU:
|
case LTU:
|
case LTU:
|
case GEU:
|
case GEU:
|
case LEU:
|
case LEU:
|
/* Need to adjust ranges for faking unsigned compares. */
|
/* Need to adjust ranges for faking unsigned compares. */
|
scratch0 = gen_reg_rtx (SImode);
|
scratch0 = gen_reg_rtx (SImode);
|
scratch1 = gen_reg_rtx (SImode);
|
scratch1 = gen_reg_rtx (SImode);
|
const_scratch = force_reg (SImode, GEN_INT(MT_MIN_INT));
|
const_scratch = force_reg (SImode, GEN_INT(MT_MIN_INT));
|
emit_insn (gen_addsi3 (scratch0, const_scratch, op0));
|
emit_insn (gen_addsi3 (scratch0, const_scratch, op0));
|
emit_insn (gen_addsi3 (scratch1, const_scratch, op1));
|
emit_insn (gen_addsi3 (scratch1, const_scratch, op1));
|
break;
|
break;
|
default:
|
default:
|
scratch0 = op0;
|
scratch0 = op0;
|
scratch1 = op1;
|
scratch1 = op1;
|
break;
|
break;
|
}
|
}
|
|
|
/* Adjust compare operator to fake unsigned compares. */
|
/* Adjust compare operator to fake unsigned compares. */
|
switch (code)
|
switch (code)
|
{
|
{
|
case GTU:
|
case GTU:
|
code = GT; break;
|
code = GT; break;
|
case LTU:
|
case LTU:
|
code = LT; break;
|
code = LT; break;
|
case GEU:
|
case GEU:
|
code = GE; break;
|
code = GE; break;
|
case LEU:
|
case LEU:
|
code = LE; break;
|
code = LE; break;
|
default:
|
default:
|
/* do nothing */
|
/* do nothing */
|
break;
|
break;
|
}
|
}
|
|
|
/* Generate the actual compare. */
|
/* Generate the actual compare. */
|
return gen_rtx_fmt_ee (code, VOIDmode, scratch0, scratch1);
|
return gen_rtx_fmt_ee (code, VOIDmode, scratch0, scratch1);
|
}
|
}
|
|
|
/* Emit a branch of kind CODE to location LOC. */
|
/* Emit a branch of kind CODE to location LOC. */
|
|
|
void
|
void
|
mt_emit_cbranch (enum rtx_code code, rtx loc, rtx op0, rtx op1)
|
mt_emit_cbranch (enum rtx_code code, rtx loc, rtx op0, rtx op1)
|
{
|
{
|
rtx condition_rtx, loc_ref;
|
rtx condition_rtx, loc_ref;
|
|
|
if (! reg_or_0_operand (op0, SImode))
|
if (! reg_or_0_operand (op0, SImode))
|
op0 = copy_to_mode_reg (SImode, op0);
|
op0 = copy_to_mode_reg (SImode, op0);
|
|
|
if (! reg_or_0_operand (op1, SImode))
|
if (! reg_or_0_operand (op1, SImode))
|
op1 = copy_to_mode_reg (SImode, op1);
|
op1 = copy_to_mode_reg (SImode, op1);
|
|
|
condition_rtx = mt_generate_compare (code, op0, op1);
|
condition_rtx = mt_generate_compare (code, op0, op1);
|
loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
|
loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
|
emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
|
emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
|
gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
|
gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
|
loc_ref, pc_rtx)));
|
loc_ref, pc_rtx)));
|
}
|
}
|
|
|
/* Subfunction of the following function. Update the flags of any MEM
|
/* Subfunction of the following function. Update the flags of any MEM
|
found in part of X. */
|
found in part of X. */
|
|
|
static void
|
static void
|
mt_set_memflags_1 (rtx x, int in_struct_p, int volatile_p)
|
mt_set_memflags_1 (rtx x, int in_struct_p, int volatile_p)
|
{
|
{
|
int i;
|
int i;
|
|
|
switch (GET_CODE (x))
|
switch (GET_CODE (x))
|
{
|
{
|
case SEQUENCE:
|
case SEQUENCE:
|
case PARALLEL:
|
case PARALLEL:
|
for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
|
for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
|
mt_set_memflags_1 (XVECEXP (x, 0, i), in_struct_p, volatile_p);
|
mt_set_memflags_1 (XVECEXP (x, 0, i), in_struct_p, volatile_p);
|
break;
|
break;
|
|
|
case INSN:
|
case INSN:
|
mt_set_memflags_1 (PATTERN (x), in_struct_p, volatile_p);
|
mt_set_memflags_1 (PATTERN (x), in_struct_p, volatile_p);
|
break;
|
break;
|
|
|
case SET:
|
case SET:
|
mt_set_memflags_1 (SET_DEST (x), in_struct_p, volatile_p);
|
mt_set_memflags_1 (SET_DEST (x), in_struct_p, volatile_p);
|
mt_set_memflags_1 (SET_SRC (x), in_struct_p, volatile_p);
|
mt_set_memflags_1 (SET_SRC (x), in_struct_p, volatile_p);
|
break;
|
break;
|
|
|
case MEM:
|
case MEM:
|
MEM_IN_STRUCT_P (x) = in_struct_p;
|
MEM_IN_STRUCT_P (x) = in_struct_p;
|
MEM_VOLATILE_P (x) = volatile_p;
|
MEM_VOLATILE_P (x) = volatile_p;
|
/* Sadly, we cannot use alias sets because the extra aliasing
|
/* Sadly, we cannot use alias sets because the extra aliasing
|
produced by the AND interferes. Given that two-byte quantities
|
produced by the AND interferes. Given that two-byte quantities
|
are the only thing we would be able to differentiate anyway,
|
are the only thing we would be able to differentiate anyway,
|
there does not seem to be any point in convoluting the early
|
there does not seem to be any point in convoluting the early
|
out of the alias check. */
|
out of the alias check. */
|
/* set_mem_alias_set (x, alias_set); */
|
/* set_mem_alias_set (x, alias_set); */
|
break;
|
break;
|
|
|
default:
|
default:
|
break;
|
break;
|
}
|
}
|
}
|
}
|
|
|
/* Look for any MEMs in the current sequence of insns and set the
|
/* Look for any MEMs in the current sequence of insns and set the
|
in-struct, unchanging, and volatile flags from the flags in REF.
|
in-struct, unchanging, and volatile flags from the flags in REF.
|
If REF is not a MEM, don't do anything. */
|
If REF is not a MEM, don't do anything. */
|
|
|
void
|
void
|
mt_set_memflags (rtx ref)
|
mt_set_memflags (rtx ref)
|
{
|
{
|
rtx insn;
|
rtx insn;
|
int in_struct_p, volatile_p;
|
int in_struct_p, volatile_p;
|
|
|
if (GET_CODE (ref) != MEM)
|
if (GET_CODE (ref) != MEM)
|
return;
|
return;
|
|
|
in_struct_p = MEM_IN_STRUCT_P (ref);
|
in_struct_p = MEM_IN_STRUCT_P (ref);
|
volatile_p = MEM_VOLATILE_P (ref);
|
volatile_p = MEM_VOLATILE_P (ref);
|
|
|
/* This is only called from mt.md, after having had something
|
/* This is only called from mt.md, after having had something
|
generated from one of the insn patterns. So if everything is
|
generated from one of the insn patterns. So if everything is
|
zero, the pattern is already up-to-date. */
|
zero, the pattern is already up-to-date. */
|
if (! in_struct_p && ! volatile_p)
|
if (! in_struct_p && ! volatile_p)
|
return;
|
return;
|
|
|
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
|
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
|
mt_set_memflags_1 (insn, in_struct_p, volatile_p);
|
mt_set_memflags_1 (insn, in_struct_p, volatile_p);
|
}
|
}
|
|
|
/* Implement SECONDARY_RELOAD_CLASS. */
|
/* Implement SECONDARY_RELOAD_CLASS. */
|
enum reg_class
|
enum reg_class
|
mt_secondary_reload_class (enum reg_class class ATTRIBUTE_UNUSED,
|
mt_secondary_reload_class (enum reg_class class ATTRIBUTE_UNUSED,
|
enum machine_mode mode,
|
enum machine_mode mode,
|
rtx x)
|
rtx x)
|
{
|
{
|
if ((mode == QImode && (!TARGET_BYTE_ACCESS)) || mode == HImode)
|
if ((mode == QImode && (!TARGET_BYTE_ACCESS)) || mode == HImode)
|
{
|
{
|
if (GET_CODE (x) == MEM
|
if (GET_CODE (x) == MEM
|
|| (GET_CODE (x) == REG && true_regnum (x) == -1)
|
|| (GET_CODE (x) == REG && true_regnum (x) == -1)
|
|| (GET_CODE (x) == SUBREG
|
|| (GET_CODE (x) == SUBREG
|
&& (GET_CODE (SUBREG_REG (x)) == MEM
|
&& (GET_CODE (SUBREG_REG (x)) == MEM
|
|| (GET_CODE (SUBREG_REG (x)) == REG
|
|| (GET_CODE (SUBREG_REG (x)) == REG
|
&& true_regnum (SUBREG_REG (x)) == -1))))
|
&& true_regnum (SUBREG_REG (x)) == -1))))
|
return GENERAL_REGS;
|
return GENERAL_REGS;
|
}
|
}
|
|
|
return NO_REGS;
|
return NO_REGS;
|
}
|
}
|
|
|
/* Handle FUNCTION_VALUE, FUNCTION_OUTGOING_VALUE, and LIBCALL_VALUE
|
/* Handle FUNCTION_VALUE, FUNCTION_OUTGOING_VALUE, and LIBCALL_VALUE
|
macros. */
|
macros. */
|
rtx
|
rtx
|
mt_function_value (tree valtype, enum machine_mode mode, tree func_decl ATTRIBUTE_UNUSED)
|
mt_function_value (tree valtype, enum machine_mode mode, tree func_decl ATTRIBUTE_UNUSED)
|
{
|
{
|
if ((mode) == DImode || (mode) == DFmode)
|
if ((mode) == DImode || (mode) == DFmode)
|
return gen_rtx_MEM (mode, gen_rtx_REG (mode, RETURN_VALUE_REGNUM));
|
return gen_rtx_MEM (mode, gen_rtx_REG (mode, RETURN_VALUE_REGNUM));
|
|
|
if (valtype)
|
if (valtype)
|
mode = TYPE_MODE (valtype);
|
mode = TYPE_MODE (valtype);
|
|
|
return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
|
return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
|
}
|
}
|
|
|
/* Split a move into two smaller pieces.
|
/* Split a move into two smaller pieces.
|
MODE indicates the reduced mode. OPERANDS[0] is the original destination
|
MODE indicates the reduced mode. OPERANDS[0] is the original destination
|
OPERANDS[1] is the original src. The new destinations are
|
OPERANDS[1] is the original src. The new destinations are
|
OPERANDS[2] and OPERANDS[4], while the new sources are OPERANDS[3]
|
OPERANDS[2] and OPERANDS[4], while the new sources are OPERANDS[3]
|
and OPERANDS[5]. */
|
and OPERANDS[5]. */
|
|
|
void
|
void
|
mt_split_words (enum machine_mode nmode,
|
mt_split_words (enum machine_mode nmode,
|
enum machine_mode omode,
|
enum machine_mode omode,
|
rtx *operands)
|
rtx *operands)
|
{
|
{
|
rtx dl,dh; /* src/dest pieces. */
|
rtx dl,dh; /* src/dest pieces. */
|
rtx sl,sh;
|
rtx sl,sh;
|
int move_high_first = 0; /* Assume no overlap. */
|
int move_high_first = 0; /* Assume no overlap. */
|
|
|
switch (GET_CODE (operands[0])) /* Dest. */
|
switch (GET_CODE (operands[0])) /* Dest. */
|
{
|
{
|
case SUBREG:
|
case SUBREG:
|
case REG:
|
case REG:
|
if ((GET_CODE (operands[1]) == REG
|
if ((GET_CODE (operands[1]) == REG
|
|| GET_CODE (operands[1]) == SUBREG)
|
|| GET_CODE (operands[1]) == SUBREG)
|
&& true_regnum (operands[0]) <= true_regnum (operands[1]))
|
&& true_regnum (operands[0]) <= true_regnum (operands[1]))
|
move_high_first = 1;
|
move_high_first = 1;
|
|
|
if (GET_CODE (operands[0]) == SUBREG)
|
if (GET_CODE (operands[0]) == SUBREG)
|
{
|
{
|
dl = gen_rtx_SUBREG (nmode, SUBREG_REG (operands[0]),
|
dl = gen_rtx_SUBREG (nmode, SUBREG_REG (operands[0]),
|
SUBREG_BYTE (operands[0]) + GET_MODE_SIZE (nmode));
|
SUBREG_BYTE (operands[0]) + GET_MODE_SIZE (nmode));
|
dh = gen_rtx_SUBREG (nmode, SUBREG_REG (operands[0]), SUBREG_BYTE (operands[0]));
|
dh = gen_rtx_SUBREG (nmode, SUBREG_REG (operands[0]), SUBREG_BYTE (operands[0]));
|
}
|
}
|
else if (GET_CODE (operands[0]) == REG && ! IS_PSEUDO_P (operands[0]))
|
else if (GET_CODE (operands[0]) == REG && ! IS_PSEUDO_P (operands[0]))
|
{
|
{
|
int r = REGNO (operands[0]);
|
int r = REGNO (operands[0]);
|
dh = gen_rtx_REG (nmode, r);
|
dh = gen_rtx_REG (nmode, r);
|
dl = gen_rtx_REG (nmode, r + HARD_REGNO_NREGS (r, nmode));
|
dl = gen_rtx_REG (nmode, r + HARD_REGNO_NREGS (r, nmode));
|
}
|
}
|
else
|
else
|
{
|
{
|
dh = gen_rtx_SUBREG (nmode, operands[0], 0);
|
dh = gen_rtx_SUBREG (nmode, operands[0], 0);
|
dl = gen_rtx_SUBREG (nmode, operands[0], GET_MODE_SIZE (nmode));
|
dl = gen_rtx_SUBREG (nmode, operands[0], GET_MODE_SIZE (nmode));
|
}
|
}
|
break;
|
break;
|
|
|
case MEM:
|
case MEM:
|
switch (GET_CODE (XEXP (operands[0], 0)))
|
switch (GET_CODE (XEXP (operands[0], 0)))
|
{
|
{
|
case POST_INC:
|
case POST_INC:
|
case POST_DEC:
|
case POST_DEC:
|
gcc_unreachable ();
|
gcc_unreachable ();
|
default:
|
default:
|
dl = operand_subword (operands[0],
|
dl = operand_subword (operands[0],
|
GET_MODE_SIZE (nmode)/UNITS_PER_WORD,
|
GET_MODE_SIZE (nmode)/UNITS_PER_WORD,
|
0, omode);
|
0, omode);
|
dh = operand_subword (operands[0], 0, 0, omode);
|
dh = operand_subword (operands[0], 0, 0, omode);
|
}
|
}
|
break;
|
break;
|
default:
|
default:
|
gcc_unreachable ();
|
gcc_unreachable ();
|
}
|
}
|
|
|
switch (GET_CODE (operands[1]))
|
switch (GET_CODE (operands[1]))
|
{
|
{
|
case REG:
|
case REG:
|
if (! IS_PSEUDO_P (operands[1]))
|
if (! IS_PSEUDO_P (operands[1]))
|
{
|
{
|
int r = REGNO (operands[1]);
|
int r = REGNO (operands[1]);
|
|
|
sh = gen_rtx_REG (nmode, r);
|
sh = gen_rtx_REG (nmode, r);
|
sl = gen_rtx_REG (nmode, r + HARD_REGNO_NREGS (r, nmode));
|
sl = gen_rtx_REG (nmode, r + HARD_REGNO_NREGS (r, nmode));
|
}
|
}
|
else
|
else
|
{
|
{
|
sh = gen_rtx_SUBREG (nmode, operands[1], 0);
|
sh = gen_rtx_SUBREG (nmode, operands[1], 0);
|
sl = gen_rtx_SUBREG (nmode, operands[1], GET_MODE_SIZE (nmode));
|
sl = gen_rtx_SUBREG (nmode, operands[1], GET_MODE_SIZE (nmode));
|
}
|
}
|
break;
|
break;
|
|
|
case CONST_DOUBLE:
|
case CONST_DOUBLE:
|
if (operands[1] == const0_rtx)
|
if (operands[1] == const0_rtx)
|
sh = sl = const0_rtx;
|
sh = sl = const0_rtx;
|
else
|
else
|
split_double (operands[1], & sh, & sl);
|
split_double (operands[1], & sh, & sl);
|
break;
|
break;
|
|
|
case CONST_INT:
|
case CONST_INT:
|
if (operands[1] == const0_rtx)
|
if (operands[1] == const0_rtx)
|
sh = sl = const0_rtx;
|
sh = sl = const0_rtx;
|
else
|
else
|
{
|
{
|
int vl, vh;
|
int vl, vh;
|
|
|
switch (nmode)
|
switch (nmode)
|
{
|
{
|
default:
|
default:
|
gcc_unreachable ();
|
gcc_unreachable ();
|
}
|
}
|
|
|
sl = GEN_INT (vl);
|
sl = GEN_INT (vl);
|
sh = GEN_INT (vh);
|
sh = GEN_INT (vh);
|
}
|
}
|
break;
|
break;
|
|
|
case SUBREG:
|
case SUBREG:
|
sl = gen_rtx_SUBREG (nmode,
|
sl = gen_rtx_SUBREG (nmode,
|
SUBREG_REG (operands[1]),
|
SUBREG_REG (operands[1]),
|
SUBREG_BYTE (operands[1]) + GET_MODE_SIZE (nmode));
|
SUBREG_BYTE (operands[1]) + GET_MODE_SIZE (nmode));
|
sh = gen_rtx_SUBREG (nmode,
|
sh = gen_rtx_SUBREG (nmode,
|
SUBREG_REG (operands[1]),
|
SUBREG_REG (operands[1]),
|
SUBREG_BYTE (operands[1]));
|
SUBREG_BYTE (operands[1]));
|
break;
|
break;
|
|
|
case MEM:
|
case MEM:
|
switch (GET_CODE (XEXP (operands[1], 0)))
|
switch (GET_CODE (XEXP (operands[1], 0)))
|
{
|
{
|
case POST_DEC:
|
case POST_DEC:
|
case POST_INC:
|
case POST_INC:
|
gcc_unreachable ();
|
gcc_unreachable ();
|
break;
|
break;
|
default:
|
default:
|
sl = operand_subword (operands[1],
|
sl = operand_subword (operands[1],
|
GET_MODE_SIZE (nmode)/UNITS_PER_WORD,
|
GET_MODE_SIZE (nmode)/UNITS_PER_WORD,
|
0, omode);
|
0, omode);
|
sh = operand_subword (operands[1], 0, 0, omode);
|
sh = operand_subword (operands[1], 0, 0, omode);
|
|
|
/* Check if the DF load is going to clobber the register
|
/* Check if the DF load is going to clobber the register
|
used for the address, and if so make sure that is going
|
used for the address, and if so make sure that is going
|
to be the second move. */
|
to be the second move. */
|
if (GET_CODE (dl) == REG
|
if (GET_CODE (dl) == REG
|
&& true_regnum (dl)
|
&& true_regnum (dl)
|
== true_regnum (XEXP (XEXP (sl, 0 ), 0)))
|
== true_regnum (XEXP (XEXP (sl, 0 ), 0)))
|
move_high_first = 1;
|
move_high_first = 1;
|
}
|
}
|
break;
|
break;
|
default:
|
default:
|
gcc_unreachable ();
|
gcc_unreachable ();
|
}
|
}
|
|
|
if (move_high_first)
|
if (move_high_first)
|
{
|
{
|
operands[2] = dh;
|
operands[2] = dh;
|
operands[3] = sh;
|
operands[3] = sh;
|
operands[4] = dl;
|
operands[4] = dl;
|
operands[5] = sl;
|
operands[5] = sl;
|
}
|
}
|
else
|
else
|
{
|
{
|
operands[2] = dl;
|
operands[2] = dl;
|
operands[3] = sl;
|
operands[3] = sl;
|
operands[4] = dh;
|
operands[4] = dh;
|
operands[5] = sh;
|
operands[5] = sh;
|
}
|
}
|
return;
|
return;
|
}
|
}
|
|
|
/* Implement TARGET_MUST_PASS_IN_STACK hook. */
|
/* Implement TARGET_MUST_PASS_IN_STACK hook. */
|
static bool
|
static bool
|
mt_pass_in_stack (enum machine_mode mode ATTRIBUTE_UNUSED, tree type)
|
mt_pass_in_stack (enum machine_mode mode ATTRIBUTE_UNUSED, tree type)
|
{
|
{
|
return (((type) != 0
|
return (((type) != 0
|
&& (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
|
&& (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
|
|| TREE_ADDRESSABLE (type))));
|
|| TREE_ADDRESSABLE (type))));
|
}
|
}
|
|
|
/* Increment the counter for the number of loop instructions in the
|
/* Increment the counter for the number of loop instructions in the
|
current function. */
|
current function. */
|
|
|
void mt_add_loop (void)
|
void mt_add_loop (void)
|
{
|
{
|
cfun->machine->has_loops++;
|
cfun->machine->has_loops++;
|
}
|
}
|
|
|
|
|
/* Maximum loop nesting depth. */
|
/* Maximum loop nesting depth. */
|
#define MAX_LOOP_DEPTH 4
|
#define MAX_LOOP_DEPTH 4
|
/* Maximum size of a loop (allows some headroom for delayed branch slot
|
/* Maximum size of a loop (allows some headroom for delayed branch slot
|
filling. */
|
filling. */
|
#define MAX_LOOP_LENGTH (200 * 4)
|
#define MAX_LOOP_LENGTH (200 * 4)
|
|
|
/* We need to keep a vector of loops */
|
/* We need to keep a vector of loops */
|
typedef struct loop_info *loop_info;
|
typedef struct loop_info *loop_info;
|
DEF_VEC_P (loop_info);
|
DEF_VEC_P (loop_info);
|
DEF_VEC_ALLOC_P (loop_info,heap);
|
DEF_VEC_ALLOC_P (loop_info,heap);
|
|
|
/* Information about a loop we have found (or are in the process of
|
/* Information about a loop we have found (or are in the process of
|
finding). */
|
finding). */
|
struct loop_info GTY (())
|
struct loop_info GTY (())
|
{
|
{
|
/* loop number, for dumps */
|
/* loop number, for dumps */
|
int loop_no;
|
int loop_no;
|
|
|
/* Predecessor block of the loop. This is the one that falls into
|
/* Predecessor block of the loop. This is the one that falls into
|
the loop and contains the initialization instruction. */
|
the loop and contains the initialization instruction. */
|
basic_block predecessor;
|
basic_block predecessor;
|
|
|
/* First block in the loop. This is the one branched to by the dbnz
|
/* First block in the loop. This is the one branched to by the dbnz
|
insn. */
|
insn. */
|
basic_block head;
|
basic_block head;
|
|
|
/* Last block in the loop (the one with the dbnz insn */
|
/* Last block in the loop (the one with the dbnz insn */
|
basic_block tail;
|
basic_block tail;
|
|
|
/* The successor block of the loop. This is the one the dbnz insn
|
/* The successor block of the loop. This is the one the dbnz insn
|
falls into. */
|
falls into. */
|
basic_block successor;
|
basic_block successor;
|
|
|
/* The dbnz insn. */
|
/* The dbnz insn. */
|
rtx dbnz;
|
rtx dbnz;
|
|
|
/* The initialization insn. */
|
/* The initialization insn. */
|
rtx init;
|
rtx init;
|
|
|
/* The new initialization instruction. */
|
/* The new initialization instruction. */
|
rtx loop_init;
|
rtx loop_init;
|
|
|
/* The new ending instruction. */
|
/* The new ending instruction. */
|
rtx loop_end;
|
rtx loop_end;
|
|
|
/* The new label placed at the end of the loop. */
|
/* The new label placed at the end of the loop. */
|
rtx end_label;
|
rtx end_label;
|
|
|
/* The nesting depth of the loop. Set to -1 for a bad loop. */
|
/* The nesting depth of the loop. Set to -1 for a bad loop. */
|
int depth;
|
int depth;
|
|
|
/* The length of the loop. */
|
/* The length of the loop. */
|
int length;
|
int length;
|
|
|
/* Next loop in the graph. */
|
/* Next loop in the graph. */
|
struct loop_info *next;
|
struct loop_info *next;
|
|
|
/* Vector of blocks only within the loop, (excluding those within
|
/* Vector of blocks only within the loop, (excluding those within
|
inner loops). */
|
inner loops). */
|
VEC (basic_block,heap) *blocks;
|
VEC (basic_block,heap) *blocks;
|
|
|
/* Vector of inner loops within this loop */
|
/* Vector of inner loops within this loop */
|
VEC (loop_info,heap) *loops;
|
VEC (loop_info,heap) *loops;
|
};
|
};
|
|
|
/* Information used during loop detection. */
|
/* Information used during loop detection. */
|
typedef struct loop_work GTY(())
|
typedef struct loop_work GTY(())
|
{
|
{
|
/* Basic block to be scanned. */
|
/* Basic block to be scanned. */
|
basic_block block;
|
basic_block block;
|
|
|
/* Loop it will be within. */
|
/* Loop it will be within. */
|
loop_info loop;
|
loop_info loop;
|
} loop_work;
|
} loop_work;
|
|
|
/* Work list. */
|
/* Work list. */
|
DEF_VEC_O (loop_work);
|
DEF_VEC_O (loop_work);
|
DEF_VEC_ALLOC_O (loop_work,heap);
|
DEF_VEC_ALLOC_O (loop_work,heap);
|
|
|
/* Determine the nesting and length of LOOP. Return false if the loop
|
/* Determine the nesting and length of LOOP. Return false if the loop
|
is bad. */
|
is bad. */
|
|
|
static bool
|
static bool
|
mt_loop_nesting (loop_info loop)
|
mt_loop_nesting (loop_info loop)
|
{
|
{
|
loop_info inner;
|
loop_info inner;
|
unsigned ix;
|
unsigned ix;
|
int inner_depth = 0;
|
int inner_depth = 0;
|
|
|
if (!loop->depth)
|
if (!loop->depth)
|
{
|
{
|
/* Make sure we only have one entry point. */
|
/* Make sure we only have one entry point. */
|
if (EDGE_COUNT (loop->head->preds) == 2)
|
if (EDGE_COUNT (loop->head->preds) == 2)
|
{
|
{
|
loop->predecessor = EDGE_PRED (loop->head, 0)->src;
|
loop->predecessor = EDGE_PRED (loop->head, 0)->src;
|
if (loop->predecessor == loop->tail)
|
if (loop->predecessor == loop->tail)
|
/* We wanted the other predecessor. */
|
/* We wanted the other predecessor. */
|
loop->predecessor = EDGE_PRED (loop->head, 1)->src;
|
loop->predecessor = EDGE_PRED (loop->head, 1)->src;
|
|
|
/* We can only place a loop insn on a fall through edge of a
|
/* We can only place a loop insn on a fall through edge of a
|
single exit block. */
|
single exit block. */
|
if (EDGE_COUNT (loop->predecessor->succs) != 1
|
if (EDGE_COUNT (loop->predecessor->succs) != 1
|
|| !(EDGE_SUCC (loop->predecessor, 0)->flags & EDGE_FALLTHRU))
|
|| !(EDGE_SUCC (loop->predecessor, 0)->flags & EDGE_FALLTHRU))
|
loop->predecessor = NULL;
|
loop->predecessor = NULL;
|
}
|
}
|
|
|
/* Mark this loop as bad for now. */
|
/* Mark this loop as bad for now. */
|
loop->depth = -1;
|
loop->depth = -1;
|
if (loop->predecessor)
|
if (loop->predecessor)
|
{
|
{
|
for (ix = 0; VEC_iterate (loop_info, loop->loops, ix++, inner);)
|
for (ix = 0; VEC_iterate (loop_info, loop->loops, ix++, inner);)
|
{
|
{
|
if (!inner->depth)
|
if (!inner->depth)
|
mt_loop_nesting (inner);
|
mt_loop_nesting (inner);
|
|
|
if (inner->depth < 0)
|
if (inner->depth < 0)
|
{
|
{
|
inner_depth = -1;
|
inner_depth = -1;
|
break;
|
break;
|
}
|
}
|
|
|
if (inner_depth < inner->depth)
|
if (inner_depth < inner->depth)
|
inner_depth = inner->depth;
|
inner_depth = inner->depth;
|
loop->length += inner->length;
|
loop->length += inner->length;
|
}
|
}
|
|
|
/* Set the proper loop depth, if it was good. */
|
/* Set the proper loop depth, if it was good. */
|
if (inner_depth >= 0)
|
if (inner_depth >= 0)
|
loop->depth = inner_depth + 1;
|
loop->depth = inner_depth + 1;
|
}
|
}
|
}
|
}
|
return (loop->depth > 0
|
return (loop->depth > 0
|
&& loop->predecessor
|
&& loop->predecessor
|
&& loop->depth < MAX_LOOP_DEPTH
|
&& loop->depth < MAX_LOOP_DEPTH
|
&& loop->length < MAX_LOOP_LENGTH);
|
&& loop->length < MAX_LOOP_LENGTH);
|
}
|
}
|
|
|
/* Determine the length of block BB. */
|
/* Determine the length of block BB. */
|
|
|
static int
|
static int
|
mt_block_length (basic_block bb)
|
mt_block_length (basic_block bb)
|
{
|
{
|
int length = 0;
|
int length = 0;
|
rtx insn;
|
rtx insn;
|
|
|
for (insn = BB_HEAD (bb);
|
for (insn = BB_HEAD (bb);
|
insn != NEXT_INSN (BB_END (bb));
|
insn != NEXT_INSN (BB_END (bb));
|
insn = NEXT_INSN (insn))
|
insn = NEXT_INSN (insn))
|
{
|
{
|
if (!INSN_P (insn))
|
if (!INSN_P (insn))
|
continue;
|
continue;
|
if (CALL_P (insn))
|
if (CALL_P (insn))
|
{
|
{
|
/* Calls are not allowed in loops. */
|
/* Calls are not allowed in loops. */
|
length = MAX_LOOP_LENGTH + 1;
|
length = MAX_LOOP_LENGTH + 1;
|
break;
|
break;
|
}
|
}
|
|
|
length += get_attr_length (insn);
|
length += get_attr_length (insn);
|
}
|
}
|
return length;
|
return length;
|
}
|
}
|
|
|
/* Scan the blocks of LOOP (and its inferiors) looking for uses of
|
/* Scan the blocks of LOOP (and its inferiors) looking for uses of
|
REG. Return true, if we find any. Don't count the loop's dbnz
|
REG. Return true, if we find any. Don't count the loop's dbnz
|
insn if it matches DBNZ. */
|
insn if it matches DBNZ. */
|
|
|
static bool
|
static bool
|
mt_scan_loop (loop_info loop, rtx reg, rtx dbnz)
|
mt_scan_loop (loop_info loop, rtx reg, rtx dbnz)
|
{
|
{
|
unsigned ix;
|
unsigned ix;
|
loop_info inner;
|
loop_info inner;
|
basic_block bb;
|
basic_block bb;
|
|
|
for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
|
for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
|
{
|
{
|
rtx insn;
|
rtx insn;
|
|
|
for (insn = BB_HEAD (bb);
|
for (insn = BB_HEAD (bb);
|
insn != NEXT_INSN (BB_END (bb));
|
insn != NEXT_INSN (BB_END (bb));
|
insn = NEXT_INSN (insn))
|
insn = NEXT_INSN (insn))
|
{
|
{
|
if (!INSN_P (insn))
|
if (!INSN_P (insn))
|
continue;
|
continue;
|
if (insn == dbnz)
|
if (insn == dbnz)
|
continue;
|
continue;
|
if (reg_mentioned_p (reg, PATTERN (insn)))
|
if (reg_mentioned_p (reg, PATTERN (insn)))
|
return true;
|
return true;
|
}
|
}
|
}
|
}
|
for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, inner); ix++)
|
for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, inner); ix++)
|
if (mt_scan_loop (inner, reg, NULL_RTX))
|
if (mt_scan_loop (inner, reg, NULL_RTX))
|
return true;
|
return true;
|
|
|
return false;
|
return false;
|
}
|
}
|
|
|
/* MS2 has a loop instruction which needs to be placed just before the
   loop.  It indicates the end of the loop and specifies the number of
   loop iterations.  It can be nested with an automatically maintained
   stack of counter and end address registers.  It's an ideal
   candidate for doloop.  Unfortunately, gcc presumes that loops
   always end with an explicit instruction, and the doloop_begin
   instruction is not a flow control instruction so it can be
   scheduled earlier than just before the start of the loop.  To make
   matters worse, the optimization pipeline can duplicate loop exit
   and entrance blocks and fails to track abnormally exiting loops.
   Thus we cannot simply use doloop.

   What we do is emit a dbnz pattern for the doloop optimization, and
   let that be optimized as normal.  Then in machine dependent reorg
   we have to repeat the loop searching algorithm.  We use the
   flow graph to find closed loops ending in a dbnz insn.  We then try
   and convert it to use the loop instruction.  The conditions are,

   * the loop has no abnormal exits, duplicated end conditions or
   duplicated entrance blocks

   * the loop counter register is only used in the dbnz instruction
   within the loop

   * we can find the instruction setting the initial value of the loop
   counter

   * the loop is not executed more than 65535 times.  (This might be
   changed to 2^32-1, and would therefore allow variable initializers.)

   * the loop is not nested more than 4 deep

   * there are no subroutine calls in the loop.  */
|
|
|
static void
mt_reorg_loops (FILE *dump_file)
{
  basic_block bb;
  loop_info loops = NULL;
  loop_info loop;
  int nloops = 0;
  /* Index of the next unprocessed entry in WORKS.  */
  unsigned dwork = 0;
  VEC (loop_work,heap) *works = VEC_alloc (loop_work,heap,20);
  loop_work *work;
  edge e;
  edge_iterator ei;
  bool replaced = false;

  /* Find all the possible loop tails.  This means searching for every
     dbnz instruction.  For each one found, create a loop_info
     structure and add the head block to the work list. */
  FOR_EACH_BB (bb)
    {
      rtx tail = BB_END (bb);

      /* Skip trailing notes so we look at the real last insn.  */
      while (GET_CODE (tail) == NOTE)
	tail = PREV_INSN (tail);

      bb->aux = NULL;
      if (recog_memoized (tail) == CODE_FOR_decrement_and_branch_until_zero)
	{
	  /* A possible loop end */

	  loop = XNEW (struct loop_info);
	  loop->next = loops;
	  loops = loop;
	  loop->tail = bb;
	  loop->head = BRANCH_EDGE (bb)->dest;
	  loop->successor = FALLTHRU_EDGE (bb)->dest;
	  loop->predecessor = NULL;
	  loop->dbnz = tail;
	  loop->depth = 0;
	  loop->length = mt_block_length (bb);
	  loop->blocks = VEC_alloc (basic_block, heap, 20);
	  VEC_quick_push (basic_block, loop->blocks, bb);
	  loop->loops = NULL;
	  loop->loop_no = nloops++;

	  loop->init = loop->end_label = NULL_RTX;
	  loop->loop_init = loop->loop_end = NULL_RTX;

	  work = VEC_safe_push (loop_work, heap, works, NULL);
	  work->block = loop->head;
	  work->loop = loop;

	  bb->aux = loop;

	  if (dump_file)
	    {
	      fprintf (dump_file, ";; potential loop %d ending at\n",
		       loop->loop_no);
	      print_rtl_single (dump_file, tail);
	    }
	}
    }

  /* Now find all the closed loops.
     until work list empty,
       if block's auxptr is set
	 if != loop slot
	   if block's loop's start != block
	     mark loop as bad
	   else
	     append block's loop's fallthrough block to worklist
	     increment this loop's depth
       else if block is exit block
	 mark loop as bad
       else
	  set auxptr
	  for each target of block
	    add to worklist */
  while (VEC_iterate (loop_work, works, dwork++, work))
    {
      loop = work->loop;
      bb = work->block;
      if (bb == EXIT_BLOCK_PTR)
	/* We've reached the exit block.  The loop must be bad. */
	loop->depth = -1;
      else if (!bb->aux)
	{
	  /* We've not seen this block before.  Add it to the loop's
	     list and then add each successor to the work list.  */
	  bb->aux = loop;
	  loop->length += mt_block_length (bb);
	  VEC_safe_push (basic_block, heap, loop->blocks, bb);
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      /* Make room for the new entry without invalidating
		 WORK: first try reclaiming the already-processed
		 prefix, otherwise grow the vector.  */
	      if (!VEC_space (loop_work, works, 1))
		{
		  if (dwork)
		    {
		      VEC_block_remove (loop_work, works, 0, dwork);
		      dwork = 0;
		    }
		  else
		    VEC_reserve (loop_work, heap, works, 1);
		}
	      work = VEC_quick_push (loop_work, works, NULL);
	      work->block = EDGE_SUCC (bb, ei.index)->dest;
	      work->loop = loop;
	    }
	}
      else if (bb->aux != loop)
	{
	  /* We've seen this block in a different loop.  If it's not
	     the other loop's head, then this loop must be bad.
	     Otherwise, the other loop might be a nested loop, so
	     continue from that loop's successor.  */
	  loop_info other = bb->aux;

	  if (other->head != bb)
	    loop->depth = -1;
	  else
	    {
	      VEC_safe_push (loop_info, heap, loop->loops, other);
	      work = VEC_safe_push (loop_work, heap, works, NULL);
	      work->loop = loop;
	      work->block = other->successor;
	    }
	}
    }
  VEC_free (loop_work, heap, works);

  /* Now optimize the loops.  */
  for (loop = loops; loop; loop = loop->next)
    {
      rtx iter_reg, insn, init_insn;
      rtx init_val, loop_end, loop_init, end_label, head_label;

      if (!mt_loop_nesting (loop))
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d is bad\n", loop->loop_no);
	  continue;
	}

      /* Get the loop iteration register.  */
      iter_reg = SET_DEST (XVECEXP (PATTERN (loop->dbnz), 0, 1));

      if (!REG_P (iter_reg))
	{
	  /* Spilled */
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d has spilled iteration count\n",
		     loop->loop_no);
	  continue;
	}

      /* Look for the initializing insn, scanning the predecessor
	 block backwards.  We stop at the first insn mentioning the
	 iteration register, whether or not it is a simple set.  */
      init_insn = NULL_RTX;
      for (insn = BB_END (loop->predecessor);
	   insn != PREV_INSN (BB_HEAD (loop->predecessor));
	   insn = PREV_INSN (insn))
	{
	  if (!INSN_P (insn))
	    continue;
	  if (reg_mentioned_p (iter_reg, PATTERN (insn)))
	    {
	      rtx set = single_set (insn);

	      if (set && rtx_equal_p (iter_reg, SET_DEST (set)))
		init_insn = insn;
	      break;
	    }
	}

      if (!init_insn)
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d has no initializer\n",
		     loop->loop_no);
	  continue;
	}
      if (dump_file)
	{
	  fprintf (dump_file, ";; loop %d initialized by\n",
		   loop->loop_no);
	  print_rtl_single (dump_file, init_insn);
	}

      init_val = PATTERN (init_insn);
      if (GET_CODE (init_val) == SET)
	init_val = SET_SRC (init_val);
      /* The +1 adjustment below must keep the count within 65535.  */
      if (GET_CODE (init_val) != CONST_INT || INTVAL (init_val) >= 65535)
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d has complex initializer\n",
		     loop->loop_no);
	  continue;
	}

      /* Scan all the blocks to make sure they don't use iter_reg.  */
      if (mt_scan_loop (loop, iter_reg, loop->dbnz))
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d uses iterator\n",
		     loop->loop_no);
	  continue;
	}

      /* The loop is good for replacement.  */

      /* loop is 1 based, dbnz is zero based.  */
      init_val = GEN_INT (INTVAL (init_val) + 1);

      iter_reg = gen_rtx_REG (SImode, LOOP_FIRST + loop->depth - 1);
      end_label = gen_label_rtx ();
      head_label = XEXP (SET_SRC (XVECEXP (PATTERN (loop->dbnz), 0, 0)), 1);
      loop_end = gen_loop_end (iter_reg, head_label);
      loop_init = gen_loop_init (iter_reg, init_val, end_label);
      loop->init = init_insn;
      loop->end_label = end_label;
      loop->loop_init = loop_init;
      loop->loop_end = loop_end;
      replaced = true;

      if (dump_file)
	{
	  fprintf (dump_file, ";; replacing loop %d initializer with\n",
		   loop->loop_no);
	  print_rtl_single (dump_file, loop->loop_init);
	  fprintf (dump_file, ";; replacing loop %d terminator with\n",
		   loop->loop_no);
	  print_rtl_single (dump_file, loop->loop_end);
	}
    }

  /* Now apply the optimizations.  Do it this way so we don't mess up
     the flow graph half way through.  */
  for (loop = loops; loop; loop = loop->next)
    if (loop->loop_init)
      {
	emit_jump_insn_after (loop->loop_init, BB_END (loop->predecessor));
	delete_insn (loop->init);
	emit_label_before (loop->end_label, loop->dbnz);
	emit_jump_insn_before (loop->loop_end, loop->dbnz);
	delete_insn (loop->dbnz);
      }

  /* Free up the loop structures */
  while (loops)
    {
      loop = loops;
      loops = loop->next;
      VEC_free (loop_info, heap, loop->loops);
      VEC_free (basic_block, heap, loop->blocks);
      XDELETE (loop);
    }

  if (replaced && dump_file)
    {
      fprintf (dump_file, ";; Replaced loops\n");
      print_rtl (dump_file, get_insns ());
    }
}
|
|
|
/* Structures to hold branch information during reorg.  */

/* One branch instruction targeting a particular label.  */
typedef struct branch_info
{
  rtx insn;			/* The branch insn.  */

  struct branch_info *next;	/* Next branch to the same label.  */
} branch_info;

/* A label together with the branches that target it.  */
typedef struct label_info
{
  rtx label;			/* The label.  */
  branch_info *branches;	/* branches to this label.  */
  struct label_info *next;	/* Next label in the chain.  */
} label_info;

/* Chain of labels found in current function, used during reorg.  */
static label_info *mt_labels;
|
|
|
/* If *X is a label, add INSN to the list of branches for that
|
/* If *X is a label, add INSN to the list of branches for that
|
label. */
|
label. */
|
|
|
static int
|
static int
|
mt_add_branches (rtx *x, void *insn)
|
mt_add_branches (rtx *x, void *insn)
|
{
|
{
|
if (GET_CODE (*x) == LABEL_REF)
|
if (GET_CODE (*x) == LABEL_REF)
|
{
|
{
|
branch_info *branch = xmalloc (sizeof (*branch));
|
branch_info *branch = xmalloc (sizeof (*branch));
|
rtx label = XEXP (*x, 0);
|
rtx label = XEXP (*x, 0);
|
label_info *info;
|
label_info *info;
|
|
|
for (info = mt_labels; info; info = info->next)
|
for (info = mt_labels; info; info = info->next)
|
if (info->label == label)
|
if (info->label == label)
|
break;
|
break;
|
|
|
if (!info)
|
if (!info)
|
{
|
{
|
info = xmalloc (sizeof (*info));
|
info = xmalloc (sizeof (*info));
|
info->next = mt_labels;
|
info->next = mt_labels;
|
mt_labels = info;
|
mt_labels = info;
|
|
|
info->label = label;
|
info->label = label;
|
info->branches = NULL;
|
info->branches = NULL;
|
}
|
}
|
|
|
branch->next = info->branches;
|
branch->next = info->branches;
|
info->branches = branch;
|
info->branches = branch;
|
branch->insn = insn;
|
branch->insn = insn;
|
}
|
}
|
return 0;
|
return 0;
|
}
|
}
|
|
|
/* If BRANCH has a filled delay slot, check if INSN is dependent upon
   it.  If so, undo the delay slot fill.   Returns the next insn, if
   we patch out the branch.  Returns the branch insn, if we cannot
   patch out the branch (due to anti-dependency in the delay slot).
   In that case, the caller must insert nops at the branch target.  */

static rtx
mt_check_delay_slot (rtx branch, rtx insn)
{
  rtx slot;
  rtx tmp;
  rtx p;
  rtx jmp;

  gcc_assert (GET_CODE (PATTERN (branch)) == SEQUENCE);
  if (INSN_DELETED_P (branch))
    return NULL_RTX;
  /* Element 1 of the SEQUENCE is the delay-slot insn.  */
  slot = XVECEXP (PATTERN (branch), 0, 1);

  /* NOTE(review): insn_dependent_p_1 appears to clear *TMP when a
     store in SLOT feeds the pattern held in TMP — so TMP surviving
     non-NULL means "no dependency".  Confirm against its definition
     earlier in this file.  */
  tmp = PATTERN (insn);
  note_stores (PATTERN (slot), insn_dependent_p_1, &tmp);
  if (tmp)
    /* Not dependent.  */
    return NULL_RTX;

  /* Undo the delay slot.  */
  jmp = XVECEXP (PATTERN (branch), 0, 0);

  /* If the jump itself depends on the slot insn, we cannot hoist the
     slot insn back out in front of it.  */
  tmp = PATTERN (jmp);
  note_stores (PATTERN (slot), insn_dependent_p_1, &tmp);
  if (!tmp)
    /* Anti dependent.  */
    return branch;

  /* Relink SLOT and JMP into the insn stream in place of the
     SEQUENCE, then delete the now-empty SEQUENCE insn.  */
  p = PREV_INSN (branch);
  NEXT_INSN (p) = slot;
  PREV_INSN (slot) = p;
  NEXT_INSN (slot) = jmp;
  PREV_INSN (jmp) = slot;
  NEXT_INSN (jmp) = branch;
  PREV_INSN (branch) = jmp;
  XVECEXP (PATTERN (branch), 0, 0) = NULL_RTX;
  XVECEXP (PATTERN (branch), 0, 1) = NULL_RTX;
  delete_insn (branch);
  return jmp;
}
|
|
|
/* Insert nops to satisfy pipeline constraints.  We only deal with ms2
   constraints here.  Earlier CPUs are dealt with by inserting nops with
   final_prescan (but that can lead to inferior code, and is
   impractical with ms2's JAL hazard).

   ms2 dynamic constraints
   1) a load and a following use must be separated by one insn
   2) an insn and a following dependent call must be separated by two insns

   only arith insns are placed in delay slots so #1 cannot happen with
   a load in a delay slot.  #2 can happen with an arith insn in the
   delay slot.  */

static void
mt_reorg_hazard (void)
{
  rtx insn, next;

  /* Find all the branches */
  for (insn = get_insns ();
       insn;
       insn = NEXT_INSN (insn))
    {
      rtx jmp;

      if (!INSN_P (insn))
	continue;

      jmp = PATTERN (insn);

      if (GET_CODE (jmp) != SEQUENCE)
	/* If it's not got a filled delay slot, then it can't
	   conflict.  */
	continue;

      jmp = XVECEXP (jmp, 0, 0);

      /* For a tablejump, record branches from the dispatch table
	 itself rather than the jump insn.  */
      if (recog_memoized (jmp) == CODE_FOR_tablejump)
	for (jmp = XEXP (XEXP (XVECEXP (PATTERN (jmp), 0, 1), 0), 0);
	     !JUMP_TABLE_DATA_P (jmp);
	     jmp = NEXT_INSN (jmp))
	  continue;

      for_each_rtx (&PATTERN (jmp), mt_add_branches, insn);
    }

  /* Now scan for dependencies.  */
  for (insn = get_insns ();
       insn && !INSN_P (insn);
       insn = NEXT_INSN (insn))
    continue;

  for (;
       insn;
       insn = next)
    {
      rtx jmp, tmp;
      enum attr_type attr;

      gcc_assert (INSN_P (insn) && !INSN_DELETED_P (insn));
      /* Find the next real insn, skipping USEs.  */
      for (next = NEXT_INSN (insn);
	   next;
	   next = NEXT_INSN (next))
	{
	  if (!INSN_P (next))
	    continue;
	  if (GET_CODE (PATTERN (next)) != USE)
	    break;
	}

      jmp = insn;
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	jmp = XVECEXP (PATTERN (insn), 0, 0);

      attr = recog_memoized (jmp) >= 0 ? get_attr_type (jmp) : TYPE_UNKNOWN;

      if (next && attr == TYPE_LOAD)
	{
	  /* A load.  See if NEXT is dependent, and if so insert a
	     nop.  */

	  tmp = PATTERN (next);
	  if (GET_CODE (tmp) == SEQUENCE)
	    tmp = PATTERN (XVECEXP (tmp, 0, 0));
	  note_stores (PATTERN (insn), insn_dependent_p_1, &tmp);
	  if (!tmp)
	    emit_insn_after (gen_nop (), insn);
	}

      if (attr == TYPE_CALL)
	{
	  /* A call.  Make sure we're not dependent on either of the
	     previous two dynamic instructions.  */
	  int nops = 0;
	  int count;
	  rtx prev = insn;
	  rtx rescan = NULL_RTX;

	  /* Walk backwards over the previous two dynamic insns.  */
	  for (count = 2; count && !nops;)
	    {
	      int type;

	      prev = PREV_INSN (prev);
	      if (!prev)
		{
		  /* If we reach the start of the function, we must
		     presume the caller set the address in the delay
		     slot of the call instruction.  */
		  nops = count;
		  break;
		}

	      if (BARRIER_P (prev))
		break;
	      if (LABEL_P (prev))
		{
		  /* Look at branches to this label.  */
		  label_info *label;
		  branch_info *branch;

		  for (label = mt_labels;
		       label;
		       label = label->next)
		    if (label->label == prev)
		      {
			for (branch = label->branches;
			     branch;
			     branch = branch->next)
			  {
			    tmp = mt_check_delay_slot (branch->insn, jmp);

			    /* Could not undo the fill: nops needed.  */
			    if (tmp == branch->insn)
			      {
				nops = count;
				break;
			      }

			    /* The fill was undone and NEXT was the
			       patched-out branch: restart the forward
			       scan from the exposed jump.  */
			    if (tmp && branch->insn == next)
			      rescan = tmp;
			  }
			break;
		      }
		  continue;
		}
	      if (!INSN_P (prev) || GET_CODE (PATTERN (prev)) == USE)
		continue;

	      if (GET_CODE (PATTERN (prev)) == SEQUENCE)
		{
		  /* Look at the delay slot.  */
		  tmp = mt_check_delay_slot (prev, jmp);
		  if (tmp == prev)
		    nops = count;
		  break;
		}

	      type = (INSN_CODE (prev) >= 0 ? get_attr_type (prev)
		      : TYPE_COMPLEX);
	      if (type == TYPE_CALL || type == TYPE_BRANCH)
		break;

	      if (type == TYPE_LOAD
		  || type == TYPE_ARITH
		  || type == TYPE_COMPLEX)
		{
		  tmp = PATTERN (jmp);
		  note_stores (PATTERN (prev), insn_dependent_p_1, &tmp);
		  if (!tmp)
		    {
		      nops = count;
		      break;
		    }
		}

	      /* Only recognized insns count as dynamic instructions.  */
	      if (INSN_CODE (prev) >= 0)
		count--;
	    }

	  if (rescan)
	    for (next = NEXT_INSN (rescan);
		 next && !INSN_P (next);
		 next = NEXT_INSN (next))
	      continue;
	  while (nops--)
	    emit_insn_before (gen_nop (), insn);
	}
    }

  /* Free the data structures.  */
  while (mt_labels)
    {
      label_info *label = mt_labels;
      branch_info *branch, *next;

      mt_labels = label->next;
      for (branch = label->branches; branch; branch = next)
	{
	  next = branch->next;
	  free (branch);
	}
      free (label);
    }
}
|
|
|
/* Fixup the looping instructions, do delayed branch scheduling, fixup
|
/* Fixup the looping instructions, do delayed branch scheduling, fixup
|
scheduling hazards. */
|
scheduling hazards. */
|
|
|
static void
|
static void
|
mt_machine_reorg (void)
|
mt_machine_reorg (void)
|
{
|
{
|
if (cfun->machine->has_loops && TARGET_MS2)
|
if (cfun->machine->has_loops && TARGET_MS2)
|
mt_reorg_loops (dump_file);
|
mt_reorg_loops (dump_file);
|
|
|
if (mt_flag_delayed_branch)
|
if (mt_flag_delayed_branch)
|
dbr_schedule (get_insns ());
|
dbr_schedule (get_insns ());
|
|
|
if (TARGET_MS2)
|
if (TARGET_MS2)
|
{
|
{
|
/* Force all instructions to be split into their final form. */
|
/* Force all instructions to be split into their final form. */
|
split_all_insns_noflow ();
|
split_all_insns_noflow ();
|
mt_reorg_hazard ();
|
mt_reorg_hazard ();
|
}
|
}
|
}
|
}
|
|
|
/* Initialize the GCC target structure.  */
const struct attribute_spec mt_attribute_table[];

/* Target hook overrides for the MorphoRISC1 port.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE mt_attribute_table
#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX mt_struct_value_rtx
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE mt_pass_by_reference
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK mt_pass_in_stack
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES mt_arg_partial_bytes
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS mt_setup_incoming_varargs
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG mt_machine_reorg

struct gcc_target targetm = TARGET_INITIALIZER;
|
|
|
#include "gt-mt.h"
|
#include "gt-mt.h"
|
|
|