/* Variable tracking routines for the GNU compiler.
   Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

/* This file contains the variable tracking pass.  It computes where
   variables are located (which registers or where in memory) at each position
   in instruction stream and emits notes describing the locations.
   Debug information (DWARF2 location lists) is finally generated from
   these notes.
   With this debug information, it is possible to show variables
   even when debugging optimized code.

   How does the variable tracking pass work?

   First, it scans RTL code for uses, stores and clobbers (register/memory
   references in instructions), for call insns and for stack adjustments
   separately for each basic block and saves them to an array of micro
   operations.
   The micro operations of one instruction are ordered so that
   pre-modifying stack adjustment < use < use with no var < call insn <
     < clobber < set < post-modifying stack adjustment

   Then, a forward dataflow analysis is performed to find out how locations
   of variables change through code and to propagate the variable locations
   along control flow graph.
   The IN set for basic block BB is computed as a union of OUT sets of BB's
   predecessors, the OUT set for BB is copied from the IN set for BB and
   is changed according to micro operations in BB.

   The IN and OUT sets for basic blocks consist of a current stack adjustment
   (used for adjusting offset of variables addressed using stack pointer),
   the table of structures describing the locations of parts of a variable
   and for each physical register a linked list for each physical register.
   The linked list is a list of variable parts stored in the register,
   i.e. it is a list of triplets (reg, decl, offset) where decl is
   REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked list is used for
   effective deleting appropriate variable parts when we set or clobber the
   register.

   There may be more than one variable part in a register.  The linked lists
   should be pretty short so it is a good data structure here.
   For example in the following code, register allocator may assign same
   register to variables A and B, and both of them are stored in the same
   register in CODE:

     if (cond)
       set A;
     else
       set B;
     CODE;
     if (cond)
       use A;
     else
       use B;

   Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
   are emitted to appropriate positions in RTL code.  Each such a note describes
   the location of one variable at the point in instruction stream where the
   note is.  There is no need to emit a note for each variable before each
   instruction, we only emit these notes where the location of variable changes
   (this means that we also emit notes for changes between the OUT set of the
   previous block and the IN set of the current block).

   The notes consist of two parts:
   1. the declaration (from REG_EXPR or MEM_EXPR)
   2. the location of a variable - it is either a simple register/memory
      reference (for simple variables, for example int),
      or a parallel of register/memory references (for a large variables
      which consist of several parts, for example long long).

*/
|
|
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "output.h"
#include "insn-config.h"
#include "reload.h"
#include "sbitmap.h"
#include "alloc-pool.h"
#include "fibheap.h"
#include "hashtab.h"
#include "regs.h"
#include "expr.h"
#include "timevar.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "cselib.h"
#include "target.h"
#include "params.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "pointer-set.h"
#include "recog.h"
|
|
|
/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
|
/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
|
has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
|
has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
|
Currently the value is the same as IDENTIFIER_NODE, which has such
|
Currently the value is the same as IDENTIFIER_NODE, which has such
|
a property. If this compile time assertion ever fails, make sure that
|
a property. If this compile time assertion ever fails, make sure that
|
the new tree code that equals (int) VALUE has the same property. */
|
the new tree code that equals (int) VALUE has the same property. */
|
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
|
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
|
|
|
/* Type of micro operation.  */
enum micro_operation_type
{
  MO_USE,	/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,/* Use location which is not associated with a variable
		   or the variable is not trackable.  */
  MO_VAL_USE,	/* Use location which is associated with a value.  */
  MO_VAL_LOC,   /* Use location which appears in a debug insn.  */
  MO_VAL_SET,	/* Set location associated with a value.  */
  MO_SET,	/* Set location.  */
  MO_COPY,	/* Copy the same portion of a variable from one
		   location to another.  */
  MO_CLOBBER,	/* Clobber location.  */
  MO_CALL,	/* Call insn.  */
  MO_ADJUST	/* Adjust stack pointer.  */

};
|
|
|
static const char * const ATTRIBUTE_UNUSED
|
static const char * const ATTRIBUTE_UNUSED
|
micro_operation_type_name[] = {
|
micro_operation_type_name[] = {
|
"MO_USE",
|
"MO_USE",
|
"MO_USE_NO_VAR",
|
"MO_USE_NO_VAR",
|
"MO_VAL_USE",
|
"MO_VAL_USE",
|
"MO_VAL_LOC",
|
"MO_VAL_LOC",
|
"MO_VAL_SET",
|
"MO_VAL_SET",
|
"MO_SET",
|
"MO_SET",
|
"MO_COPY",
|
"MO_COPY",
|
"MO_CLOBBER",
|
"MO_CLOBBER",
|
"MO_CALL",
|
"MO_CALL",
|
"MO_ADJUST"
|
"MO_ADJUST"
|
};
|
};
|
|
|
/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};
|
|
|
/* Structure holding information about micro operation. */
|
/* Structure holding information about micro operation. */
|
typedef struct micro_operation_def
|
typedef struct micro_operation_def
|
{
|
{
|
/* Type of micro operation. */
|
/* Type of micro operation. */
|
enum micro_operation_type type;
|
enum micro_operation_type type;
|
|
|
/* The instruction which the micro operation is in, for MO_USE,
|
/* The instruction which the micro operation is in, for MO_USE,
|
MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
|
MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
|
instruction or note in the original flow (before any var-tracking
|
instruction or note in the original flow (before any var-tracking
|
notes are inserted, to simplify emission of notes), for MO_SET
|
notes are inserted, to simplify emission of notes), for MO_SET
|
and MO_CLOBBER. */
|
and MO_CLOBBER. */
|
rtx insn;
|
rtx insn;
|
|
|
union {
|
union {
|
/* Location. For MO_SET and MO_COPY, this is the SET that
|
/* Location. For MO_SET and MO_COPY, this is the SET that
|
performs the assignment, if known, otherwise it is the target
|
performs the assignment, if known, otherwise it is the target
|
of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
|
of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a
|
CONCAT of the VALUE and the LOC associated with it. For
|
CONCAT of the VALUE and the LOC associated with it. For
|
MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
|
MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
|
associated with it. */
|
associated with it. */
|
rtx loc;
|
rtx loc;
|
|
|
/* Stack adjustment. */
|
/* Stack adjustment. */
|
HOST_WIDE_INT adjust;
|
HOST_WIDE_INT adjust;
|
} u;
|
} u;
|
} micro_operation;
|
} micro_operation;
|
|
|
DEF_VEC_O(micro_operation);
|
DEF_VEC_O(micro_operation);
|
DEF_VEC_ALLOC_O(micro_operation,heap);
|
DEF_VEC_ALLOC_O(micro_operation,heap);
|
|
|
/* A declaration of a variable, or an RTL value being handled like a
   declaration.  */
typedef void *decl_or_value;
|
|
|
/* Structure for passing some other parameters to function
|
/* Structure for passing some other parameters to function
|
emit_note_insn_var_location. */
|
emit_note_insn_var_location. */
|
typedef struct emit_note_data_def
|
typedef struct emit_note_data_def
|
{
|
{
|
/* The instruction which the note will be emitted before/after. */
|
/* The instruction which the note will be emitted before/after. */
|
rtx insn;
|
rtx insn;
|
|
|
/* Where the note will be emitted (before/after insn)? */
|
/* Where the note will be emitted (before/after insn)? */
|
enum emit_note_where where;
|
enum emit_note_where where;
|
|
|
/* The variables and values active at this point. */
|
/* The variables and values active at this point. */
|
htab_t vars;
|
htab_t vars;
|
} emit_note_data;
|
} emit_note_data;
|
|
|
/* Description of location of a part of a variable. The content of a physical
|
/* Description of location of a part of a variable. The content of a physical
|
register is described by a chain of these structures.
|
register is described by a chain of these structures.
|
The chains are pretty short (usually 1 or 2 elements) and thus
|
The chains are pretty short (usually 1 or 2 elements) and thus
|
chain is the best data structure. */
|
chain is the best data structure. */
|
typedef struct attrs_def
|
typedef struct attrs_def
|
{
|
{
|
/* Pointer to next member of the list. */
|
/* Pointer to next member of the list. */
|
struct attrs_def *next;
|
struct attrs_def *next;
|
|
|
/* The rtx of register. */
|
/* The rtx of register. */
|
rtx loc;
|
rtx loc;
|
|
|
/* The declaration corresponding to LOC. */
|
/* The declaration corresponding to LOC. */
|
decl_or_value dv;
|
decl_or_value dv;
|
|
|
/* Offset from start of DECL. */
|
/* Offset from start of DECL. */
|
HOST_WIDE_INT offset;
|
HOST_WIDE_INT offset;
|
} *attrs;
|
} *attrs;
|
|
|
/* Structure holding a refcounted hash table. If refcount > 1,
|
/* Structure holding a refcounted hash table. If refcount > 1,
|
it must be first unshared before modified. */
|
it must be first unshared before modified. */
|
typedef struct shared_hash_def
|
typedef struct shared_hash_def
|
{
|
{
|
/* Reference count. */
|
/* Reference count. */
|
int refcount;
|
int refcount;
|
|
|
/* Actual hash table. */
|
/* Actual hash table. */
|
htab_t htab;
|
htab_t htab;
|
} *shared_hash;
|
} *shared_hash;
|
|
|
/* Structure holding the IN or OUT set for a basic block. */
|
/* Structure holding the IN or OUT set for a basic block. */
|
typedef struct dataflow_set_def
|
typedef struct dataflow_set_def
|
{
|
{
|
/* Adjustment of stack offset. */
|
/* Adjustment of stack offset. */
|
HOST_WIDE_INT stack_adjust;
|
HOST_WIDE_INT stack_adjust;
|
|
|
/* Attributes for registers (lists of attrs). */
|
/* Attributes for registers (lists of attrs). */
|
attrs regs[FIRST_PSEUDO_REGISTER];
|
attrs regs[FIRST_PSEUDO_REGISTER];
|
|
|
/* Variable locations. */
|
/* Variable locations. */
|
shared_hash vars;
|
shared_hash vars;
|
|
|
/* Vars that is being traversed. */
|
/* Vars that is being traversed. */
|
shared_hash traversed_vars;
|
shared_hash traversed_vars;
|
} dataflow_set;
|
} dataflow_set;
|
|
|
/* The structure (one for each basic block) containing the information
|
/* The structure (one for each basic block) containing the information
|
needed for variable tracking. */
|
needed for variable tracking. */
|
typedef struct variable_tracking_info_def
|
typedef struct variable_tracking_info_def
|
{
|
{
|
/* The vector of micro operations. */
|
/* The vector of micro operations. */
|
VEC(micro_operation, heap) *mos;
|
VEC(micro_operation, heap) *mos;
|
|
|
/* The IN and OUT set for dataflow analysis. */
|
/* The IN and OUT set for dataflow analysis. */
|
dataflow_set in;
|
dataflow_set in;
|
dataflow_set out;
|
dataflow_set out;
|
|
|
/* The permanent-in dataflow set for this block. This is used to
|
/* The permanent-in dataflow set for this block. This is used to
|
hold values for which we had to compute entry values. ??? This
|
hold values for which we had to compute entry values. ??? This
|
should probably be dynamically allocated, to avoid using more
|
should probably be dynamically allocated, to avoid using more
|
memory in non-debug builds. */
|
memory in non-debug builds. */
|
dataflow_set *permp;
|
dataflow_set *permp;
|
|
|
/* Has the block been visited in DFS? */
|
/* Has the block been visited in DFS? */
|
bool visited;
|
bool visited;
|
|
|
/* Has the block been flooded in VTA? */
|
/* Has the block been flooded in VTA? */
|
bool flooded;
|
bool flooded;
|
|
|
} *variable_tracking_info;
|
} *variable_tracking_info;
|
|
|
/* Structure for chaining the locations. */
|
/* Structure for chaining the locations. */
|
typedef struct location_chain_def
|
typedef struct location_chain_def
|
{
|
{
|
/* Next element in the chain. */
|
/* Next element in the chain. */
|
struct location_chain_def *next;
|
struct location_chain_def *next;
|
|
|
/* The location (REG, MEM or VALUE). */
|
/* The location (REG, MEM or VALUE). */
|
rtx loc;
|
rtx loc;
|
|
|
/* The "value" stored in this location. */
|
/* The "value" stored in this location. */
|
rtx set_src;
|
rtx set_src;
|
|
|
/* Initialized? */
|
/* Initialized? */
|
enum var_init_status init;
|
enum var_init_status init;
|
} *location_chain;
|
} *location_chain;
|
|
|
/* A vector of loc_exp_dep holds the active dependencies of a one-part
|
/* A vector of loc_exp_dep holds the active dependencies of a one-part
|
DV on VALUEs, i.e., the VALUEs expanded so as to form the current
|
DV on VALUEs, i.e., the VALUEs expanded so as to form the current
|
location of DV. Each entry is also part of VALUE' s linked-list of
|
location of DV. Each entry is also part of VALUE' s linked-list of
|
backlinks back to DV. */
|
backlinks back to DV. */
|
typedef struct loc_exp_dep_s
|
typedef struct loc_exp_dep_s
|
{
|
{
|
/* The dependent DV. */
|
/* The dependent DV. */
|
decl_or_value dv;
|
decl_or_value dv;
|
/* The dependency VALUE or DECL_DEBUG. */
|
/* The dependency VALUE or DECL_DEBUG. */
|
rtx value;
|
rtx value;
|
/* The next entry in VALUE's backlinks list. */
|
/* The next entry in VALUE's backlinks list. */
|
struct loc_exp_dep_s *next;
|
struct loc_exp_dep_s *next;
|
/* A pointer to the pointer to this entry (head or prev's next) in
|
/* A pointer to the pointer to this entry (head or prev's next) in
|
the doubly-linked list. */
|
the doubly-linked list. */
|
struct loc_exp_dep_s **pprev;
|
struct loc_exp_dep_s **pprev;
|
} loc_exp_dep;
|
} loc_exp_dep;
|
|
|
DEF_VEC_O (loc_exp_dep);
|
DEF_VEC_O (loc_exp_dep);
|
|
|
/* This data structure is allocated for one-part variables at the time
|
/* This data structure is allocated for one-part variables at the time
|
of emitting notes. */
|
of emitting notes. */
|
struct onepart_aux
|
struct onepart_aux
|
{
|
{
|
/* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
|
/* Doubly-linked list of dependent DVs. These are DVs whose cur_loc
|
computation used the expansion of this variable, and that ought
|
computation used the expansion of this variable, and that ought
|
to be notified should this variable change. If the DV's cur_loc
|
to be notified should this variable change. If the DV's cur_loc
|
expanded to NULL, all components of the loc list are regarded as
|
expanded to NULL, all components of the loc list are regarded as
|
active, so that any changes in them give us a chance to get a
|
active, so that any changes in them give us a chance to get a
|
location. Otherwise, only components of the loc that expanded to
|
location. Otherwise, only components of the loc that expanded to
|
non-NULL are regarded as active dependencies. */
|
non-NULL are regarded as active dependencies. */
|
loc_exp_dep *backlinks;
|
loc_exp_dep *backlinks;
|
/* This holds the LOC that was expanded into cur_loc. We need only
|
/* This holds the LOC that was expanded into cur_loc. We need only
|
mark a one-part variable as changed if the FROM loc is removed,
|
mark a one-part variable as changed if the FROM loc is removed,
|
or if it has no known location and a loc is added, or if it gets
|
or if it has no known location and a loc is added, or if it gets
|
a change notification from any of its active dependencies. */
|
a change notification from any of its active dependencies. */
|
rtx from;
|
rtx from;
|
/* The depth of the cur_loc expression. */
|
/* The depth of the cur_loc expression. */
|
int depth;
|
int depth;
|
/* Dependencies actively used when expand FROM into cur_loc. */
|
/* Dependencies actively used when expand FROM into cur_loc. */
|
VEC (loc_exp_dep, none) deps;
|
VEC (loc_exp_dep, none) deps;
|
};
|
};
|
|
|
/* Structure describing one part of variable. */
|
/* Structure describing one part of variable. */
|
typedef struct variable_part_def
|
typedef struct variable_part_def
|
{
|
{
|
/* Chain of locations of the part. */
|
/* Chain of locations of the part. */
|
location_chain loc_chain;
|
location_chain loc_chain;
|
|
|
/* Location which was last emitted to location list. */
|
/* Location which was last emitted to location list. */
|
rtx cur_loc;
|
rtx cur_loc;
|
|
|
union variable_aux
|
union variable_aux
|
{
|
{
|
/* The offset in the variable, if !var->onepart. */
|
/* The offset in the variable, if !var->onepart. */
|
HOST_WIDE_INT offset;
|
HOST_WIDE_INT offset;
|
|
|
/* Pointer to auxiliary data, if var->onepart and emit_notes. */
|
/* Pointer to auxiliary data, if var->onepart and emit_notes. */
|
struct onepart_aux *onepaux;
|
struct onepart_aux *onepaux;
|
} aux;
|
} aux;
|
} variable_part;
|
} variable_part;
|
|
|
/* Maximum number of location parts. */
|
/* Maximum number of location parts. */
|
#define MAX_VAR_PARTS 16
|
#define MAX_VAR_PARTS 16
|
|
|
/* Enumeration type used to discriminate various types of one-part
   variables.  */
typedef enum onepart_enum
{
  /* Not a one-part variable.  */
  NOT_ONEPART = 0,
  /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
  ONEPART_VDECL = 1,
  /* A DEBUG_EXPR_DECL.  */
  ONEPART_DEXPR = 2,
  /* A VALUE.  */
  ONEPART_VALUE = 3
} onepart_enum_t;
|
|
|
/* Structure describing where the variable is located. */
|
/* Structure describing where the variable is located. */
|
typedef struct variable_def
|
typedef struct variable_def
|
{
|
{
|
/* The declaration of the variable, or an RTL value being handled
|
/* The declaration of the variable, or an RTL value being handled
|
like a declaration. */
|
like a declaration. */
|
decl_or_value dv;
|
decl_or_value dv;
|
|
|
/* Reference count. */
|
/* Reference count. */
|
int refcount;
|
int refcount;
|
|
|
/* Number of variable parts. */
|
/* Number of variable parts. */
|
char n_var_parts;
|
char n_var_parts;
|
|
|
/* What type of DV this is, according to enum onepart_enum. */
|
/* What type of DV this is, according to enum onepart_enum. */
|
ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;
|
ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;
|
|
|
/* True if this variable_def struct is currently in the
|
/* True if this variable_def struct is currently in the
|
changed_variables hash table. */
|
changed_variables hash table. */
|
bool in_changed_variables;
|
bool in_changed_variables;
|
|
|
/* The variable parts. */
|
/* The variable parts. */
|
variable_part var_part[1];
|
variable_part var_part[1];
|
} *variable;
|
} *variable;
|
typedef const struct variable_def *const_variable;
|
typedef const struct variable_def *const_variable;
|
|
|
/* Pointer to the BB's information specific to variable tracking pass.  */
#define VTI(BB) ((variable_tracking_info) (BB)->aux)

/* Macro to access MEM_OFFSET as an HOST_WIDE_INT.  Evaluates MEM twice.  */
#define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)

#if ENABLE_CHECKING && (GCC_VERSION >= 2007)

/* Access VAR's Ith part's offset, checking that it's not a one-part
   variable.  */
#define VAR_PART_OFFSET(var, i) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (!__v->onepart);			\
      &__v->var_part[(i)].aux.offset; }))

/* Access VAR's one-part auxiliary data, checking that it is a
   one-part variable.  */
#define VAR_LOC_1PAUX(var) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (__v->onepart);			\
      &__v->var_part[0].aux.onepaux; }))

#else
#define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
#define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
#endif

/* These are accessor macros for the one-part auxiliary data.  When
   convenient for users, they're guarded by tests that the data was
   allocated.  */
#define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var)		      \
			      ? VAR_LOC_1PAUX (var)->backlinks	      \
			      : NULL)
#define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var)		      \
			       ? &VAR_LOC_1PAUX (var)->backlinks      \
			       : NULL)
#define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from)
#define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth)
#define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var)		      \
			      ? &VAR_LOC_1PAUX (var)->deps	      \
			      : NULL)
|
|
|
/* Alloc pool for struct attrs_def. */
|
/* Alloc pool for struct attrs_def. */
|
static alloc_pool attrs_pool;
|
static alloc_pool attrs_pool;
|
|
|
/* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
|
/* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */
|
static alloc_pool var_pool;
|
static alloc_pool var_pool;
|
|
|
/* Alloc pool for struct variable_def with a single var_part entry. */
|
/* Alloc pool for struct variable_def with a single var_part entry. */
|
static alloc_pool valvar_pool;
|
static alloc_pool valvar_pool;
|
|
|
/* Alloc pool for struct location_chain_def. */
|
/* Alloc pool for struct location_chain_def. */
|
static alloc_pool loc_chain_pool;
|
static alloc_pool loc_chain_pool;
|
|
|
/* Alloc pool for struct shared_hash_def. */
|
/* Alloc pool for struct shared_hash_def. */
|
static alloc_pool shared_hash_pool;
|
static alloc_pool shared_hash_pool;
|
|
|
/* Changed variables, notes will be emitted for them. */
|
/* Changed variables, notes will be emitted for them. */
|
static htab_t changed_variables;
|
static htab_t changed_variables;
|
|
|
/* Shall notes be emitted? */
|
/* Shall notes be emitted? */
|
static bool emit_notes;
|
static bool emit_notes;
|
|
|
/* Values whose dynamic location lists have gone empty, but whose
|
/* Values whose dynamic location lists have gone empty, but whose
|
cselib location lists are still usable. Use this to hold the
|
cselib location lists are still usable. Use this to hold the
|
current location, the backlinks, etc, during emit_notes. */
|
current location, the backlinks, etc, during emit_notes. */
|
static htab_t dropped_values;
|
static htab_t dropped_values;
|
|
|
/* Empty shared hashtable. */
|
/* Empty shared hashtable. */
|
static shared_hash empty_shared_hash;
|
static shared_hash empty_shared_hash;
|
|
|
/* Scratch register bitmap used by cselib_expand_value_rtx. */
|
/* Scratch register bitmap used by cselib_expand_value_rtx. */
|
static bitmap scratch_regs = NULL;
|
static bitmap scratch_regs = NULL;
|
|
|
#ifdef HAVE_window_save
|
#ifdef HAVE_window_save
|
typedef struct GTY(()) parm_reg {
|
typedef struct GTY(()) parm_reg {
|
rtx outgoing;
|
rtx outgoing;
|
rtx incoming;
|
rtx incoming;
|
} parm_reg_t;
|
} parm_reg_t;
|
|
|
DEF_VEC_O(parm_reg_t);
|
DEF_VEC_O(parm_reg_t);
|
DEF_VEC_ALLOC_O(parm_reg_t, gc);
|
DEF_VEC_ALLOC_O(parm_reg_t, gc);
|
|
|
/* Vector of windowed parameter registers, if any. */
|
/* Vector of windowed parameter registers, if any. */
|
static VEC(parm_reg_t, gc) *windowed_parm_regs = NULL;
|
static VEC(parm_reg_t, gc) *windowed_parm_regs = NULL;
|
#endif
|
#endif
|
|
|
/* Variable used to tell whether cselib_process_insn called our hook. */
|
/* Variable used to tell whether cselib_process_insn called our hook. */
|
static bool cselib_hook_called;
|
static bool cselib_hook_called;
|
|
|
/* Local function prototypes. */
|
/* Local function prototypes. */
|
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
|
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
|
HOST_WIDE_INT *);
|
HOST_WIDE_INT *);
|
static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
|
static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
|
HOST_WIDE_INT *);
|
HOST_WIDE_INT *);
|
static bool vt_stack_adjustments (void);
|
static bool vt_stack_adjustments (void);
|
static hashval_t variable_htab_hash (const void *);
|
static hashval_t variable_htab_hash (const void *);
|
static int variable_htab_eq (const void *, const void *);
|
static int variable_htab_eq (const void *, const void *);
|
static void variable_htab_free (void *);
|
static void variable_htab_free (void *);
|
|
|
static void init_attrs_list_set (attrs *);
|
static void init_attrs_list_set (attrs *);
|
static void attrs_list_clear (attrs *);
|
static void attrs_list_clear (attrs *);
|
static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
|
static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
|
static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
|
static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
|
static void attrs_list_copy (attrs *, attrs);
|
static void attrs_list_copy (attrs *, attrs);
|
static void attrs_list_union (attrs *, attrs);
|
static void attrs_list_union (attrs *, attrs);
|
|
|
static void **unshare_variable (dataflow_set *set, void **slot, variable var,
|
static void **unshare_variable (dataflow_set *set, void **slot, variable var,
|
enum var_init_status);
|
enum var_init_status);
|
static void vars_copy (htab_t, htab_t);
|
static void vars_copy (htab_t, htab_t);
|
static tree var_debug_decl (tree);
|
static tree var_debug_decl (tree);
|
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
|
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
|
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
|
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
|
enum var_init_status, rtx);
|
enum var_init_status, rtx);
|
static void var_reg_delete (dataflow_set *, rtx, bool);
|
static void var_reg_delete (dataflow_set *, rtx, bool);
|
static void var_regno_delete (dataflow_set *, int);
|
static void var_regno_delete (dataflow_set *, int);
|
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
|
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
|
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
|
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
|
enum var_init_status, rtx);
|
enum var_init_status, rtx);
|
static void var_mem_delete (dataflow_set *, rtx, bool);
|
static void var_mem_delete (dataflow_set *, rtx, bool);
|
|
|
static void dataflow_set_init (dataflow_set *);
|
static void dataflow_set_init (dataflow_set *);
|
static void dataflow_set_clear (dataflow_set *);
|
static void dataflow_set_clear (dataflow_set *);
|
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
|
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
|
static int variable_union_info_cmp_pos (const void *, const void *);
|
static int variable_union_info_cmp_pos (const void *, const void *);
|
static void dataflow_set_union (dataflow_set *, dataflow_set *);
|
static void dataflow_set_union (dataflow_set *, dataflow_set *);
|
static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
|
static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
|
static bool canon_value_cmp (rtx, rtx);
|
static bool canon_value_cmp (rtx, rtx);
|
static int loc_cmp (rtx, rtx);
|
static int loc_cmp (rtx, rtx);
|
static bool variable_part_different_p (variable_part *, variable_part *);
|
static bool variable_part_different_p (variable_part *, variable_part *);
|
static bool onepart_variable_different_p (variable, variable);
|
static bool onepart_variable_different_p (variable, variable);
|
static bool variable_different_p (variable, variable);
|
static bool variable_different_p (variable, variable);
|
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
|
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
|
static void dataflow_set_destroy (dataflow_set *);
|
static void dataflow_set_destroy (dataflow_set *);
|
|
|
static bool contains_symbol_ref (rtx);
|
static bool contains_symbol_ref (rtx);
|
static bool track_expr_p (tree, bool);
|
static bool track_expr_p (tree, bool);
|
static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
|
static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
|
static int add_uses (rtx *, void *);
|
static int add_uses (rtx *, void *);
|
static void add_uses_1 (rtx *, void *);
|
static void add_uses_1 (rtx *, void *);
|
static void add_stores (rtx, const_rtx, void *);
|
static void add_stores (rtx, const_rtx, void *);
|
static bool compute_bb_dataflow (basic_block);
|
static bool compute_bb_dataflow (basic_block);
|
static bool vt_find_locations (void);
|
static bool vt_find_locations (void);
|
|
|
static void dump_attrs_list (attrs);
|
static void dump_attrs_list (attrs);
|
static int dump_var_slot (void **, void *);
|
static int dump_var_slot (void **, void *);
|
static void dump_var (variable);
|
static void dump_var (variable);
|
static void dump_vars (htab_t);
|
static void dump_vars (htab_t);
|
static void dump_dataflow_set (dataflow_set *);
|
static void dump_dataflow_set (dataflow_set *);
|
static void dump_dataflow_sets (void);
|
static void dump_dataflow_sets (void);
|
|
|
static void set_dv_changed (decl_or_value, bool);
|
static void set_dv_changed (decl_or_value, bool);
|
static void variable_was_changed (variable, dataflow_set *);
|
static void variable_was_changed (variable, dataflow_set *);
|
static void **set_slot_part (dataflow_set *, rtx, void **,
|
static void **set_slot_part (dataflow_set *, rtx, void **,
|
decl_or_value, HOST_WIDE_INT,
|
decl_or_value, HOST_WIDE_INT,
|
enum var_init_status, rtx);
|
enum var_init_status, rtx);
|
static void set_variable_part (dataflow_set *, rtx,
|
static void set_variable_part (dataflow_set *, rtx,
|
decl_or_value, HOST_WIDE_INT,
|
decl_or_value, HOST_WIDE_INT,
|
enum var_init_status, rtx, enum insert_option);
|
enum var_init_status, rtx, enum insert_option);
|
static void **clobber_slot_part (dataflow_set *, rtx,
|
static void **clobber_slot_part (dataflow_set *, rtx,
|
void **, HOST_WIDE_INT, rtx);
|
void **, HOST_WIDE_INT, rtx);
|
static void clobber_variable_part (dataflow_set *, rtx,
|
static void clobber_variable_part (dataflow_set *, rtx,
|
decl_or_value, HOST_WIDE_INT, rtx);
|
decl_or_value, HOST_WIDE_INT, rtx);
|
static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
|
static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
|
static void delete_variable_part (dataflow_set *, rtx,
|
static void delete_variable_part (dataflow_set *, rtx,
|
decl_or_value, HOST_WIDE_INT);
|
decl_or_value, HOST_WIDE_INT);
|
static int emit_note_insn_var_location (void **, void *);
|
static int emit_note_insn_var_location (void **, void *);
|
static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
|
static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
|
static int emit_notes_for_differences_1 (void **, void *);
|
static int emit_notes_for_differences_1 (void **, void *);
|
static int emit_notes_for_differences_2 (void **, void *);
|
static int emit_notes_for_differences_2 (void **, void *);
|
static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
|
static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
|
static void emit_notes_in_bb (basic_block, dataflow_set *);
|
static void emit_notes_in_bb (basic_block, dataflow_set *);
|
static void vt_emit_notes (void);
|
static void vt_emit_notes (void);
|
|
|
static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
|
static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
|
static void vt_add_function_parameters (void);
|
static void vt_add_function_parameters (void);
|
static bool vt_initialize (void);
|
static bool vt_initialize (void);
|
static void vt_finalize (void);
|
static void vt_finalize (void);
|
|
|
/* Given a SET, calculate the amount of stack adjustment it contains
|
/* Given a SET, calculate the amount of stack adjustment it contains
|
PRE- and POST-modifying stack pointer.
|
PRE- and POST-modifying stack pointer.
|
This function is similar to stack_adjust_offset. */
|
This function is similar to stack_adjust_offset. */
|
|
|
static void
|
static void
|
stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
|
stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
|
HOST_WIDE_INT *post)
|
HOST_WIDE_INT *post)
|
{
|
{
|
rtx src = SET_SRC (pattern);
|
rtx src = SET_SRC (pattern);
|
rtx dest = SET_DEST (pattern);
|
rtx dest = SET_DEST (pattern);
|
enum rtx_code code;
|
enum rtx_code code;
|
|
|
if (dest == stack_pointer_rtx)
|
if (dest == stack_pointer_rtx)
|
{
|
{
|
/* (set (reg sp) (plus (reg sp) (const_int))) */
|
/* (set (reg sp) (plus (reg sp) (const_int))) */
|
code = GET_CODE (src);
|
code = GET_CODE (src);
|
if (! (code == PLUS || code == MINUS)
|
if (! (code == PLUS || code == MINUS)
|
|| XEXP (src, 0) != stack_pointer_rtx
|
|| XEXP (src, 0) != stack_pointer_rtx
|
|| !CONST_INT_P (XEXP (src, 1)))
|
|| !CONST_INT_P (XEXP (src, 1)))
|
return;
|
return;
|
|
|
if (code == MINUS)
|
if (code == MINUS)
|
*post += INTVAL (XEXP (src, 1));
|
*post += INTVAL (XEXP (src, 1));
|
else
|
else
|
*post -= INTVAL (XEXP (src, 1));
|
*post -= INTVAL (XEXP (src, 1));
|
}
|
}
|
else if (MEM_P (dest))
|
else if (MEM_P (dest))
|
{
|
{
|
/* (set (mem (pre_dec (reg sp))) (foo)) */
|
/* (set (mem (pre_dec (reg sp))) (foo)) */
|
src = XEXP (dest, 0);
|
src = XEXP (dest, 0);
|
code = GET_CODE (src);
|
code = GET_CODE (src);
|
|
|
switch (code)
|
switch (code)
|
{
|
{
|
case PRE_MODIFY:
|
case PRE_MODIFY:
|
case POST_MODIFY:
|
case POST_MODIFY:
|
if (XEXP (src, 0) == stack_pointer_rtx)
|
if (XEXP (src, 0) == stack_pointer_rtx)
|
{
|
{
|
rtx val = XEXP (XEXP (src, 1), 1);
|
rtx val = XEXP (XEXP (src, 1), 1);
|
/* We handle only adjustments by constant amount. */
|
/* We handle only adjustments by constant amount. */
|
gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
|
gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
|
CONST_INT_P (val));
|
CONST_INT_P (val));
|
|
|
if (code == PRE_MODIFY)
|
if (code == PRE_MODIFY)
|
*pre -= INTVAL (val);
|
*pre -= INTVAL (val);
|
else
|
else
|
*post -= INTVAL (val);
|
*post -= INTVAL (val);
|
break;
|
break;
|
}
|
}
|
return;
|
return;
|
|
|
case PRE_DEC:
|
case PRE_DEC:
|
if (XEXP (src, 0) == stack_pointer_rtx)
|
if (XEXP (src, 0) == stack_pointer_rtx)
|
{
|
{
|
*pre += GET_MODE_SIZE (GET_MODE (dest));
|
*pre += GET_MODE_SIZE (GET_MODE (dest));
|
break;
|
break;
|
}
|
}
|
return;
|
return;
|
|
|
case POST_DEC:
|
case POST_DEC:
|
if (XEXP (src, 0) == stack_pointer_rtx)
|
if (XEXP (src, 0) == stack_pointer_rtx)
|
{
|
{
|
*post += GET_MODE_SIZE (GET_MODE (dest));
|
*post += GET_MODE_SIZE (GET_MODE (dest));
|
break;
|
break;
|
}
|
}
|
return;
|
return;
|
|
|
case PRE_INC:
|
case PRE_INC:
|
if (XEXP (src, 0) == stack_pointer_rtx)
|
if (XEXP (src, 0) == stack_pointer_rtx)
|
{
|
{
|
*pre -= GET_MODE_SIZE (GET_MODE (dest));
|
*pre -= GET_MODE_SIZE (GET_MODE (dest));
|
break;
|
break;
|
}
|
}
|
return;
|
return;
|
|
|
case POST_INC:
|
case POST_INC:
|
if (XEXP (src, 0) == stack_pointer_rtx)
|
if (XEXP (src, 0) == stack_pointer_rtx)
|
{
|
{
|
*post -= GET_MODE_SIZE (GET_MODE (dest));
|
*post -= GET_MODE_SIZE (GET_MODE (dest));
|
break;
|
break;
|
}
|
}
|
return;
|
return;
|
|
|
default:
|
default:
|
return;
|
return;
|
}
|
}
|
}
|
}
|
}
|
}
|
|
|
/* Given an INSN, calculate the amount of stack adjustment it contains
|
/* Given an INSN, calculate the amount of stack adjustment it contains
|
PRE- and POST-modifying stack pointer. */
|
PRE- and POST-modifying stack pointer. */
|
|
|
static void
|
static void
|
insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
|
insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
|
HOST_WIDE_INT *post)
|
HOST_WIDE_INT *post)
|
{
|
{
|
rtx pattern;
|
rtx pattern;
|
|
|
*pre = 0;
|
*pre = 0;
|
*post = 0;
|
*post = 0;
|
|
|
pattern = PATTERN (insn);
|
pattern = PATTERN (insn);
|
if (RTX_FRAME_RELATED_P (insn))
|
if (RTX_FRAME_RELATED_P (insn))
|
{
|
{
|
rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
|
rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
|
if (expr)
|
if (expr)
|
pattern = XEXP (expr, 0);
|
pattern = XEXP (expr, 0);
|
}
|
}
|
|
|
if (GET_CODE (pattern) == SET)
|
if (GET_CODE (pattern) == SET)
|
stack_adjust_offset_pre_post (pattern, pre, post);
|
stack_adjust_offset_pre_post (pattern, pre, post);
|
else if (GET_CODE (pattern) == PARALLEL
|
else if (GET_CODE (pattern) == PARALLEL
|
|| GET_CODE (pattern) == SEQUENCE)
|
|| GET_CODE (pattern) == SEQUENCE)
|
{
|
{
|
int i;
|
int i;
|
|
|
/* There may be stack adjustments inside compound insns. Search
|
/* There may be stack adjustments inside compound insns. Search
|
for them. */
|
for them. */
|
for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
|
for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
|
if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
|
if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
|
stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
|
stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
|
}
|
}
|
}
|
}
|
|
|
/* Compute stack adjustments for all blocks by traversing DFS tree.
|
/* Compute stack adjustments for all blocks by traversing DFS tree.
|
Return true when the adjustments on all incoming edges are consistent.
|
Return true when the adjustments on all incoming edges are consistent.
|
Heavily borrowed from pre_and_rev_post_order_compute. */
|
Heavily borrowed from pre_and_rev_post_order_compute. */
|
|
|
static bool
|
static bool
|
vt_stack_adjustments (void)
|
vt_stack_adjustments (void)
|
{
|
{
|
edge_iterator *stack;
|
edge_iterator *stack;
|
int sp;
|
int sp;
|
|
|
/* Initialize entry block. */
|
/* Initialize entry block. */
|
VTI (ENTRY_BLOCK_PTR)->visited = true;
|
VTI (ENTRY_BLOCK_PTR)->visited = true;
|
VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
|
VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
|
VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
|
VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
|
|
|
/* Allocate stack for back-tracking up CFG. */
|
/* Allocate stack for back-tracking up CFG. */
|
stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
|
stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
|
sp = 0;
|
sp = 0;
|
|
|
/* Push the first edge on to the stack. */
|
/* Push the first edge on to the stack. */
|
stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
|
stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
|
|
|
while (sp)
|
while (sp)
|
{
|
{
|
edge_iterator ei;
|
edge_iterator ei;
|
basic_block src;
|
basic_block src;
|
basic_block dest;
|
basic_block dest;
|
|
|
/* Look at the edge on the top of the stack. */
|
/* Look at the edge on the top of the stack. */
|
ei = stack[sp - 1];
|
ei = stack[sp - 1];
|
src = ei_edge (ei)->src;
|
src = ei_edge (ei)->src;
|
dest = ei_edge (ei)->dest;
|
dest = ei_edge (ei)->dest;
|
|
|
/* Check if the edge destination has been visited yet. */
|
/* Check if the edge destination has been visited yet. */
|
if (!VTI (dest)->visited)
|
if (!VTI (dest)->visited)
|
{
|
{
|
rtx insn;
|
rtx insn;
|
HOST_WIDE_INT pre, post, offset;
|
HOST_WIDE_INT pre, post, offset;
|
VTI (dest)->visited = true;
|
VTI (dest)->visited = true;
|
VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
|
VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
|
|
|
if (dest != EXIT_BLOCK_PTR)
|
if (dest != EXIT_BLOCK_PTR)
|
for (insn = BB_HEAD (dest);
|
for (insn = BB_HEAD (dest);
|
insn != NEXT_INSN (BB_END (dest));
|
insn != NEXT_INSN (BB_END (dest));
|
insn = NEXT_INSN (insn))
|
insn = NEXT_INSN (insn))
|
if (INSN_P (insn))
|
if (INSN_P (insn))
|
{
|
{
|
insn_stack_adjust_offset_pre_post (insn, &pre, &post);
|
insn_stack_adjust_offset_pre_post (insn, &pre, &post);
|
offset += pre + post;
|
offset += pre + post;
|
}
|
}
|
|
|
VTI (dest)->out.stack_adjust = offset;
|
VTI (dest)->out.stack_adjust = offset;
|
|
|
if (EDGE_COUNT (dest->succs) > 0)
|
if (EDGE_COUNT (dest->succs) > 0)
|
/* Since the DEST node has been visited for the first
|
/* Since the DEST node has been visited for the first
|
time, check its successors. */
|
time, check its successors. */
|
stack[sp++] = ei_start (dest->succs);
|
stack[sp++] = ei_start (dest->succs);
|
}
|
}
|
else
|
else
|
{
|
{
|
/* Check whether the adjustments on the edges are the same. */
|
/* Check whether the adjustments on the edges are the same. */
|
if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
|
if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
|
{
|
{
|
free (stack);
|
free (stack);
|
return false;
|
return false;
|
}
|
}
|
|
|
if (! ei_one_before_end_p (ei))
|
if (! ei_one_before_end_p (ei))
|
/* Go to the next edge. */
|
/* Go to the next edge. */
|
ei_next (&stack[sp - 1]);
|
ei_next (&stack[sp - 1]);
|
else
|
else
|
/* Return to previous level if there are no more edges. */
|
/* Return to previous level if there are no more edges. */
|
sp--;
|
sp--;
|
}
|
}
|
}
|
}
|
|
|
free (stack);
|
free (stack);
|
return true;
|
return true;
|
}
|
}
|
|
|
/* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
|
/* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
|
hard_frame_pointer_rtx is being mapped to it and offset for it. */
|
hard_frame_pointer_rtx is being mapped to it and offset for it. */
|
static rtx cfa_base_rtx;
|
static rtx cfa_base_rtx;
|
static HOST_WIDE_INT cfa_base_offset;
|
static HOST_WIDE_INT cfa_base_offset;
|
|
|
/* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
|
/* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
|
or hard_frame_pointer_rtx. */
|
or hard_frame_pointer_rtx. */
|
|
|
static inline rtx
|
static inline rtx
|
compute_cfa_pointer (HOST_WIDE_INT adjustment)
|
compute_cfa_pointer (HOST_WIDE_INT adjustment)
|
{
|
{
|
return plus_constant (cfa_base_rtx, adjustment + cfa_base_offset);
|
return plus_constant (cfa_base_rtx, adjustment + cfa_base_offset);
|
}
|
}
|
|
|
/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
|
/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
|
or -1 if the replacement shouldn't be done. */
|
or -1 if the replacement shouldn't be done. */
|
static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
|
static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
|
|
|
/* Data for adjust_mems callback. */
|
/* Data for adjust_mems callback. */
|
|
|
struct adjust_mem_data
|
struct adjust_mem_data
|
{
|
{
|
bool store;
|
bool store;
|
enum machine_mode mem_mode;
|
enum machine_mode mem_mode;
|
HOST_WIDE_INT stack_adjust;
|
HOST_WIDE_INT stack_adjust;
|
rtx side_effects;
|
rtx side_effects;
|
};
|
};
|
|
|
/* Helper for adjust_mems. Return 1 if *loc is unsuitable for
|
/* Helper for adjust_mems. Return 1 if *loc is unsuitable for
|
transformation of wider mode arithmetics to narrower mode,
|
transformation of wider mode arithmetics to narrower mode,
|
-1 if it is suitable and subexpressions shouldn't be
|
-1 if it is suitable and subexpressions shouldn't be
|
traversed and 0 if it is suitable and subexpressions should
|
traversed and 0 if it is suitable and subexpressions should
|
be traversed. Called through for_each_rtx. */
|
be traversed. Called through for_each_rtx. */
|
|
|
static int
|
static int
|
use_narrower_mode_test (rtx *loc, void *data)
|
use_narrower_mode_test (rtx *loc, void *data)
|
{
|
{
|
rtx subreg = (rtx) data;
|
rtx subreg = (rtx) data;
|
|
|
if (CONSTANT_P (*loc))
|
if (CONSTANT_P (*loc))
|
return -1;
|
return -1;
|
switch (GET_CODE (*loc))
|
switch (GET_CODE (*loc))
|
{
|
{
|
case REG:
|
case REG:
|
if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
|
if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
|
return 1;
|
return 1;
|
if (!validate_subreg (GET_MODE (subreg), GET_MODE (*loc),
|
if (!validate_subreg (GET_MODE (subreg), GET_MODE (*loc),
|
*loc, subreg_lowpart_offset (GET_MODE (subreg),
|
*loc, subreg_lowpart_offset (GET_MODE (subreg),
|
GET_MODE (*loc))))
|
GET_MODE (*loc))))
|
return 1;
|
return 1;
|
return -1;
|
return -1;
|
case PLUS:
|
case PLUS:
|
case MINUS:
|
case MINUS:
|
case MULT:
|
case MULT:
|
return 0;
|
return 0;
|
case ASHIFT:
|
case ASHIFT:
|
if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
|
if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
|
return 1;
|
return 1;
|
else
|
else
|
return -1;
|
return -1;
|
default:
|
default:
|
return 1;
|
return 1;
|
}
|
}
|
}
|
}
|
|
|
/* Transform X into narrower mode MODE from wider mode WMODE. */
|
/* Transform X into narrower mode MODE from wider mode WMODE. */
|
|
|
static rtx
|
static rtx
|
use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
|
use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
|
{
|
{
|
rtx op0, op1;
|
rtx op0, op1;
|
if (CONSTANT_P (x))
|
if (CONSTANT_P (x))
|
return lowpart_subreg (mode, x, wmode);
|
return lowpart_subreg (mode, x, wmode);
|
switch (GET_CODE (x))
|
switch (GET_CODE (x))
|
{
|
{
|
case REG:
|
case REG:
|
return lowpart_subreg (mode, x, wmode);
|
return lowpart_subreg (mode, x, wmode);
|
case PLUS:
|
case PLUS:
|
case MINUS:
|
case MINUS:
|
case MULT:
|
case MULT:
|
op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
|
op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
|
op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
|
op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
|
return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
|
return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
|
case ASHIFT:
|
case ASHIFT:
|
op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
|
op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
|
return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
|
return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
|
default:
|
default:
|
gcc_unreachable ();
|
gcc_unreachable ();
|
}
|
}
|
}
|
}
|
|
|
/* Helper function for adjusting used MEMs. */
|
/* Helper function for adjusting used MEMs. */
|
|
|
static rtx
|
static rtx
|
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
|
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
|
{
|
{
|
struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
|
struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
|
rtx mem, addr = loc, tem;
|
rtx mem, addr = loc, tem;
|
enum machine_mode mem_mode_save;
|
enum machine_mode mem_mode_save;
|
bool store_save;
|
bool store_save;
|
switch (GET_CODE (loc))
|
switch (GET_CODE (loc))
|
{
|
{
|
case REG:
|
case REG:
|
/* Don't do any sp or fp replacements outside of MEM addresses
|
/* Don't do any sp or fp replacements outside of MEM addresses
|
on the LHS. */
|
on the LHS. */
|
if (amd->mem_mode == VOIDmode && amd->store)
|
if (amd->mem_mode == VOIDmode && amd->store)
|
return loc;
|
return loc;
|
if (loc == stack_pointer_rtx
|
if (loc == stack_pointer_rtx
|
&& !frame_pointer_needed
|
&& !frame_pointer_needed
|
&& cfa_base_rtx)
|
&& cfa_base_rtx)
|
return compute_cfa_pointer (amd->stack_adjust);
|
return compute_cfa_pointer (amd->stack_adjust);
|
else if (loc == hard_frame_pointer_rtx
|
else if (loc == hard_frame_pointer_rtx
|
&& frame_pointer_needed
|
&& frame_pointer_needed
|
&& hard_frame_pointer_adjustment != -1
|
&& hard_frame_pointer_adjustment != -1
|
&& cfa_base_rtx)
|
&& cfa_base_rtx)
|
return compute_cfa_pointer (hard_frame_pointer_adjustment);
|
return compute_cfa_pointer (hard_frame_pointer_adjustment);
|
gcc_checking_assert (loc != virtual_incoming_args_rtx);
|
gcc_checking_assert (loc != virtual_incoming_args_rtx);
|
return loc;
|
return loc;
|
case MEM:
|
case MEM:
|
mem = loc;
|
mem = loc;
|
if (!amd->store)
|
if (!amd->store)
|
{
|
{
|
mem = targetm.delegitimize_address (mem);
|
mem = targetm.delegitimize_address (mem);
|
if (mem != loc && !MEM_P (mem))
|
if (mem != loc && !MEM_P (mem))
|
return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
|
return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
|
}
|
}
|
|
|
addr = XEXP (mem, 0);
|
addr = XEXP (mem, 0);
|
mem_mode_save = amd->mem_mode;
|
mem_mode_save = amd->mem_mode;
|
amd->mem_mode = GET_MODE (mem);
|
amd->mem_mode = GET_MODE (mem);
|
store_save = amd->store;
|
store_save = amd->store;
|
amd->store = false;
|
amd->store = false;
|
addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
|
addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
|
amd->store = store_save;
|
amd->store = store_save;
|
amd->mem_mode = mem_mode_save;
|
amd->mem_mode = mem_mode_save;
|
if (mem == loc)
|
if (mem == loc)
|
addr = targetm.delegitimize_address (addr);
|
addr = targetm.delegitimize_address (addr);
|
if (addr != XEXP (mem, 0))
|
if (addr != XEXP (mem, 0))
|
mem = replace_equiv_address_nv (mem, addr);
|
mem = replace_equiv_address_nv (mem, addr);
|
if (!amd->store)
|
if (!amd->store)
|
mem = avoid_constant_pool_reference (mem);
|
mem = avoid_constant_pool_reference (mem);
|
return mem;
|
return mem;
|
case PRE_INC:
|
case PRE_INC:
|
case PRE_DEC:
|
case PRE_DEC:
|
addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
|
addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
|
GEN_INT (GET_CODE (loc) == PRE_INC
|
GEN_INT (GET_CODE (loc) == PRE_INC
|
? GET_MODE_SIZE (amd->mem_mode)
|
? GET_MODE_SIZE (amd->mem_mode)
|
: -GET_MODE_SIZE (amd->mem_mode)));
|
: -GET_MODE_SIZE (amd->mem_mode)));
|
case POST_INC:
|
case POST_INC:
|
case POST_DEC:
|
case POST_DEC:
|
if (addr == loc)
|
if (addr == loc)
|
addr = XEXP (loc, 0);
|
addr = XEXP (loc, 0);
|
gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
|
gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
|
addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
|
addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
|
tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
|
tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
|
GEN_INT ((GET_CODE (loc) == PRE_INC
|
GEN_INT ((GET_CODE (loc) == PRE_INC
|
|| GET_CODE (loc) == POST_INC)
|
|| GET_CODE (loc) == POST_INC)
|
? GET_MODE_SIZE (amd->mem_mode)
|
? GET_MODE_SIZE (amd->mem_mode)
|
: -GET_MODE_SIZE (amd->mem_mode)));
|
: -GET_MODE_SIZE (amd->mem_mode)));
|
amd->side_effects = alloc_EXPR_LIST (0,
|
amd->side_effects = alloc_EXPR_LIST (0,
|
gen_rtx_SET (VOIDmode,
|
gen_rtx_SET (VOIDmode,
|
XEXP (loc, 0),
|
XEXP (loc, 0),
|
tem),
|
tem),
|
amd->side_effects);
|
amd->side_effects);
|
return addr;
|
return addr;
|
case PRE_MODIFY:
|
case PRE_MODIFY:
|
addr = XEXP (loc, 1);
|
addr = XEXP (loc, 1);
|
case POST_MODIFY:
|
case POST_MODIFY:
|
if (addr == loc)
|
if (addr == loc)
|
addr = XEXP (loc, 0);
|
addr = XEXP (loc, 0);
|
gcc_assert (amd->mem_mode != VOIDmode);
|
gcc_assert (amd->mem_mode != VOIDmode);
|
addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
|
addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
|
amd->side_effects = alloc_EXPR_LIST (0,
|
amd->side_effects = alloc_EXPR_LIST (0,
|
gen_rtx_SET (VOIDmode,
|
gen_rtx_SET (VOIDmode,
|
XEXP (loc, 0),
|
XEXP (loc, 0),
|
XEXP (loc, 1)),
|
XEXP (loc, 1)),
|
amd->side_effects);
|
amd->side_effects);
|
return addr;
|
return addr;
|
case SUBREG:
|
case SUBREG:
|
/* First try without delegitimization of whole MEMs and
|
/* First try without delegitimization of whole MEMs and
|
avoid_constant_pool_reference, which is more likely to succeed. */
|
avoid_constant_pool_reference, which is more likely to succeed. */
|
store_save = amd->store;
|
store_save = amd->store;
|
amd->store = true;
|
amd->store = true;
|
addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
|
addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
|
data);
|
data);
|
amd->store = store_save;
|
amd->store = store_save;
|
mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
|
mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
|
if (mem == SUBREG_REG (loc))
|
if (mem == SUBREG_REG (loc))
|
{
|
{
|
tem = loc;
|
tem = loc;
|
goto finish_subreg;
|
goto finish_subreg;
|
}
|
}
|
tem = simplify_gen_subreg (GET_MODE (loc), mem,
|
tem = simplify_gen_subreg (GET_MODE (loc), mem,
|
GET_MODE (SUBREG_REG (loc)),
|
GET_MODE (SUBREG_REG (loc)),
|
SUBREG_BYTE (loc));
|
SUBREG_BYTE (loc));
|
if (tem)
|
if (tem)
|
goto finish_subreg;
|
goto finish_subreg;
|
tem = simplify_gen_subreg (GET_MODE (loc), addr,
|
tem = simplify_gen_subreg (GET_MODE (loc), addr,
|
GET_MODE (SUBREG_REG (loc)),
|
GET_MODE (SUBREG_REG (loc)),
|
SUBREG_BYTE (loc));
|
SUBREG_BYTE (loc));
|
if (tem == NULL_RTX)
|
if (tem == NULL_RTX)
|
tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
|
tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
|
finish_subreg:
|
finish_subreg:
|
if (MAY_HAVE_DEBUG_INSNS
|
if (MAY_HAVE_DEBUG_INSNS
|
&& GET_CODE (tem) == SUBREG
|
&& GET_CODE (tem) == SUBREG
|
&& (GET_CODE (SUBREG_REG (tem)) == PLUS
|
&& (GET_CODE (SUBREG_REG (tem)) == PLUS
|
|| GET_CODE (SUBREG_REG (tem)) == MINUS
|
|| GET_CODE (SUBREG_REG (tem)) == MINUS
|
|| GET_CODE (SUBREG_REG (tem)) == MULT
|
|| GET_CODE (SUBREG_REG (tem)) == MULT
|
|| GET_CODE (SUBREG_REG (tem)) == ASHIFT)
|
|| GET_CODE (SUBREG_REG (tem)) == ASHIFT)
|
&& GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
|
&& GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
|
&& GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
|
&& GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
|
&& GET_MODE_SIZE (GET_MODE (tem))
|
&& GET_MODE_SIZE (GET_MODE (tem))
|
< GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
|
< GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
|
&& subreg_lowpart_p (tem)
|
&& subreg_lowpart_p (tem)
|
&& !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
|
&& !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
|
return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
|
return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
|
GET_MODE (SUBREG_REG (tem)));
|
GET_MODE (SUBREG_REG (tem)));
|
return tem;
|
return tem;
|
case ASM_OPERANDS:
|
case ASM_OPERANDS:
|
/* Don't do any replacements in second and following
|
/* Don't do any replacements in second and following
|
ASM_OPERANDS of inline-asm with multiple sets.
|
ASM_OPERANDS of inline-asm with multiple sets.
|
ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
|
ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
|
and ASM_OPERANDS_LABEL_VEC need to be equal between
|
and ASM_OPERANDS_LABEL_VEC need to be equal between
|
all the ASM_OPERANDs in the insn and adjust_insn will
|
all the ASM_OPERANDs in the insn and adjust_insn will
|
fix this up. */
|
fix this up. */
|
if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
|
if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
|
return loc;
|
return loc;
|
break;
|
break;
|
default:
|
default:
|
break;
|
break;
|
}
|
}
|
return NULL_RTX;
|
return NULL_RTX;
|
}
|
}
|
|
|
/* Helper function for replacement of uses. */
|
/* Helper function for replacement of uses. */
|
|
|
static void
|
static void
|
adjust_mem_uses (rtx *x, void *data)
|
adjust_mem_uses (rtx *x, void *data)
|
{
|
{
|
rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
|
rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
|
if (new_x != *x)
|
if (new_x != *x)
|
validate_change (NULL_RTX, x, new_x, true);
|
validate_change (NULL_RTX, x, new_x, true);
|
}
|
}
|
|
|
/* Helper function for replacement of stores. */
|
/* Helper function for replacement of stores. */
|
|
|
static void
|
static void
|
adjust_mem_stores (rtx loc, const_rtx expr, void *data)
|
adjust_mem_stores (rtx loc, const_rtx expr, void *data)
|
{
|
{
|
if (MEM_P (loc))
|
if (MEM_P (loc))
|
{
|
{
|
rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
|
rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
|
adjust_mems, data);
|
adjust_mems, data);
|
if (new_dest != SET_DEST (expr))
|
if (new_dest != SET_DEST (expr))
|
{
|
{
|
rtx xexpr = CONST_CAST_RTX (expr);
|
rtx xexpr = CONST_CAST_RTX (expr);
|
validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
|
validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
|
}
|
}
|
}
|
}
|
}
|
}
|
|
|
/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
   replace them with their value in the insn and add the side-effects
   as other sets to the insn.  BB is the block containing INSN; its
   recorded out-of-block stack adjustment seeds the rewrite state.
   All replacements are queued via validate_change and are expected to
   be committed by the caller.  */

static void
adjust_insn (basic_block bb, rtx insn)
{
  struct adjust_mem_data amd;
  rtx set;

#ifdef HAVE_window_save
  /* If the target machine has an explicit window save instruction, the
     transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.  */
  if (RTX_FRAME_RELATED_P (insn)
      && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
    {
      unsigned int i, nregs = VEC_length(parm_reg_t, windowed_parm_regs);
      /* Two entries per windowed parameter register: a SET of the
	 incoming reg from the outgoing one, plus a CLOBBER.  */
      rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
      parm_reg_t *p;

      FOR_EACH_VEC_ELT (parm_reg_t, windowed_parm_regs, i, p)
	{
	  XVECEXP (rtl, 0, i * 2)
	    = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
	  /* Do not clobber the attached DECL, but only the REG.  */
	  XVECEXP (rtl, 0, i * 2 + 1)
	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
			       gen_raw_REG (GET_MODE (p->outgoing),
					    REGNO (p->outgoing)));
	}

      validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
      /* The whole pattern was replaced; nothing else to adjust.  */
      return;
    }
#endif

  /* Seed the rewrite state consumed by adjust_mems via DATA; the
     negated out-of-block stack adjustment is presumably relative to
     the block's final stack state -- confirm against adjust_mems.  */
  amd.mem_mode = VOIDmode;
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;
  amd.side_effects = NULL_RTX;

  /* First rewrite store destinations...  */
  amd.store = true;
  note_stores (PATTERN (insn), adjust_mem_stores, &amd);

  /* ...then rewrite uses.  */
  amd.store = false;
  if (GET_CODE (PATTERN (insn)) == PARALLEL
      && asm_noperands (PATTERN (insn)) > 0
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      rtx body, set0;
      int i;

      /* inline-asm with multiple sets is tiny bit more complicated,
	 because the 3 vectors in ASM_OPERANDS need to be shared between
	 all ASM_OPERANDS in the instruction.  adjust_mems will
	 not touch ASM_OPERANDS other than the first one, asm_noperands
	 test above needs to be called before that (otherwise it would fail)
	 and afterwards this code fixes it up.  */
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
      body = PATTERN (insn);
      set0 = XVECEXP (body, 0, 0);
      gcc_checking_assert (GET_CODE (set0) == SET
			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
      /* Walk the remaining SETs; re-share the three operand vectors of
	 each trailing ASM_OPERANDS with those of the first one.  */
      for (i = 1; i < XVECLEN (body, 0); i++)
	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
	  break;
	else
	  {
	    set = XVECEXP (body, 0, i);
	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
				    == i);
	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
	      {
		/* Copy the ASM_OPERANDS shallowly so the shared vector
		   fields can be redirected without touching the insn's
		   original rtx in place.  */
		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
		ASM_OPERANDS_INPUT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
		ASM_OPERANDS_LABEL_VEC (newsrc)
		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
	      }
	  }
    }
  else
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);

  /* For read-only MEMs containing some constant, prefer those
     constants.  */
  set = single_set (insn);
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
    }

  /* If adjust_mems queued side effects (extracted from auto-inc/dec
     addresses), append them as extra elements of a PARALLEL.  */
  if (amd.side_effects)
    {
      rtx *pat, new_pat, s;
      int i, oldn, newn;

      pat = &PATTERN (insn);
      if (GET_CODE (*pat) == COND_EXEC)
	pat = &COND_EXEC_CODE (*pat);
      if (GET_CODE (*pat) == PARALLEL)
	oldn = XVECLEN (*pat, 0);
      else
	oldn = 1;
      /* Count the queued side effects (an EXPR_LIST chain).  */
      for (s = amd.side_effects, newn = 0; s; newn++)
	s = XEXP (s, 1);
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
      /* Keep the original element(s) first...  */
      if (GET_CODE (*pat) == PARALLEL)
	for (i = 0; i < oldn; i++)
	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
      else
	XVECEXP (new_pat, 0, 0) = *pat;
      /* ...then the side effects, in queue order.  */
      for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
	XVECEXP (new_pat, 0, i) = XEXP (s, 0);
      free_EXPR_LIST_list (&amd.side_effects);
      validate_change (NULL_RTX, pat, new_pat, true);
    }
}
|
|
|
/* Return true if a decl_or_value DV is a DECL or NULL. */
|
/* Return true if a decl_or_value DV is a DECL or NULL. */
|
static inline bool
|
static inline bool
|
dv_is_decl_p (decl_or_value dv)
|
dv_is_decl_p (decl_or_value dv)
|
{
|
{
|
return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
|
return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
|
}
|
}
|
|
|
/* Return true if a decl_or_value is a VALUE rtl. */
|
/* Return true if a decl_or_value is a VALUE rtl. */
|
static inline bool
|
static inline bool
|
dv_is_value_p (decl_or_value dv)
|
dv_is_value_p (decl_or_value dv)
|
{
|
{
|
return dv && !dv_is_decl_p (dv);
|
return dv && !dv_is_decl_p (dv);
|
}
|
}
|
|
|
/* Return the decl in the decl_or_value.  */
static inline tree
dv_as_decl (decl_or_value dv)
{
  /* Checked-build-only guard: DV must not hold a VALUE rtx.  */
  gcc_checking_assert (dv_is_decl_p (dv));
  return (tree) dv;
}
|
|
|
/* Return the value in the decl_or_value.  */
static inline rtx
dv_as_value (decl_or_value dv)
{
  /* Checked-build-only guard: DV must hold a VALUE rtx, not a decl.  */
  gcc_checking_assert (dv_is_value_p (dv));
  return (rtx)dv;
}
|
|
|
/* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
|
/* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */
|
static inline rtx
|
static inline rtx
|
dv_as_rtx (decl_or_value dv)
|
dv_as_rtx (decl_or_value dv)
|
{
|
{
|
tree decl;
|
tree decl;
|
|
|
if (dv_is_value_p (dv))
|
if (dv_is_value_p (dv))
|
return dv_as_value (dv);
|
return dv_as_value (dv);
|
|
|
decl = dv_as_decl (dv);
|
decl = dv_as_decl (dv);
|
|
|
gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
|
gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
|
return DECL_RTL_KNOWN_SET (decl);
|
return DECL_RTL_KNOWN_SET (decl);
|
}
|
}
|
|
|
/* Return the opaque pointer in the decl_or_value.  Used for identity
   comparisons without caring whether DV is a decl or a value.  */
static inline void *
dv_as_opaque (decl_or_value dv)
{
  return dv;
}
|
|
|
/* Return nonzero if a decl_or_value must not have more than one
   variable part.  The returned value discriminates among various
   kinds of one-part DVs according to enum onepart_enum.  */
static inline onepart_enum_t
dv_onepart_p (decl_or_value dv)
{
  tree decl;

  /* Without debug insns nothing is tracked as one-part.  */
  if (!MAY_HAVE_DEBUG_INSNS)
    return NOT_ONEPART;

  /* Test the VALUE case first: dv_as_decl below would assert on it.  */
  if (dv_is_value_p (dv))
    return ONEPART_VALUE;

  decl = dv_as_decl (dv);

  if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
    return ONEPART_DEXPR;

  /* Decls suitable for a debug bind target are one-part as well.  */
  if (target_for_debug_bind (decl) != NULL_TREE)
    return ONEPART_VDECL;

  return NOT_ONEPART;
}
|
|
|
/* Return the variable pool to be used for a dv of type ONEPART. */
|
/* Return the variable pool to be used for a dv of type ONEPART. */
|
static inline alloc_pool
|
static inline alloc_pool
|
onepart_pool (onepart_enum_t onepart)
|
onepart_pool (onepart_enum_t onepart)
|
{
|
{
|
return onepart ? valvar_pool : var_pool;
|
return onepart ? valvar_pool : var_pool;
|
}
|
}
|
|
|
/* Build a decl_or_value out of a decl.  */
static inline decl_or_value
dv_from_decl (tree decl)
{
  decl_or_value dv;
  dv = decl;
  /* Checked-build-only guard that DECL is not a VALUE rtx.  */
  gcc_checking_assert (dv_is_decl_p (dv));
  return dv;
}
|
|
|
/* Build a decl_or_value out of a value.  */
static inline decl_or_value
dv_from_value (rtx value)
{
  decl_or_value dv;
  dv = value;
  /* Checked-build-only guard that VALUE really is a VALUE rtx.  */
  gcc_checking_assert (dv_is_value_p (dv));
  return dv;
}
|
|
|
/* Return a value or the decl of a debug_expr as a decl_or_value. */
|
/* Return a value or the decl of a debug_expr as a decl_or_value. */
|
static inline decl_or_value
|
static inline decl_or_value
|
dv_from_rtx (rtx x)
|
dv_from_rtx (rtx x)
|
{
|
{
|
decl_or_value dv;
|
decl_or_value dv;
|
|
|
switch (GET_CODE (x))
|
switch (GET_CODE (x))
|
{
|
{
|
case DEBUG_EXPR:
|
case DEBUG_EXPR:
|
dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
|
dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
|
gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
|
gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
|
break;
|
break;
|
|
|
case VALUE:
|
case VALUE:
|
dv = dv_from_value (x);
|
dv = dv_from_value (x);
|
break;
|
break;
|
|
|
default:
|
default:
|
gcc_unreachable ();
|
gcc_unreachable ();
|
}
|
}
|
|
|
return dv;
|
return dv;
|
}
|
}
|
|
|
extern void debug_dv (decl_or_value dv);
|
extern void debug_dv (decl_or_value dv);
|
|
|
DEBUG_FUNCTION void
|
DEBUG_FUNCTION void
|
debug_dv (decl_or_value dv)
|
debug_dv (decl_or_value dv)
|
{
|
{
|
if (dv_is_value_p (dv))
|
if (dv_is_value_p (dv))
|
debug_rtx (dv_as_value (dv));
|
debug_rtx (dv_as_value (dv));
|
else
|
else
|
debug_generic_stmt (dv_as_decl (dv));
|
debug_generic_stmt (dv_as_decl (dv));
|
}
|
}
|
|
|
typedef unsigned int dvuid;
|
typedef unsigned int dvuid;
|
|
|
/* Return the uid of DV. */
|
/* Return the uid of DV. */
|
|
|
static inline dvuid
|
static inline dvuid
|
dv_uid (decl_or_value dv)
|
dv_uid (decl_or_value dv)
|
{
|
{
|
if (dv_is_value_p (dv))
|
if (dv_is_value_p (dv))
|
return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
|
return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
|
else
|
else
|
return DECL_UID (dv_as_decl (dv));
|
return DECL_UID (dv_as_decl (dv));
|
}
|
}
|
|
|
/* Compute the hash from the uid.  The uid is used directly as the
   hash value.  */

static inline hashval_t
dv_uid2hash (dvuid uid)
{
  return uid;
}
|
|
|
/* The hash function for a mask table in a shared_htab chain.
   Hashes DV via its uid.  */

static inline hashval_t
dv_htab_hash (decl_or_value dv)
{
  return dv_uid2hash (dv_uid (dv));
}
|
|
|
/* The hash function for variable_htab, computes the hash value
   from the declaration of variable X.  */

static hashval_t
variable_htab_hash (const void *x)
{
  /* htab callbacks take void *; X is really a variable.  */
  const_variable const v = (const_variable) x;

  return dv_htab_hash (v->dv);
}
|
|
|
/* Compare the declaration of variable X with declaration Y.
   Y is a bare decl_or_value, not a variable.  */

static int
variable_htab_eq (const void *x, const void *y)
{
  const_variable const v = (const_variable) x;
  decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);

  /* Identity comparison of the opaque pointers suffices.  */
  return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
}
|
|
|
static void loc_exp_dep_clear (variable var);
|
static void loc_exp_dep_clear (variable var);
|
|
|
/* Free the element of VARIABLE_HTAB (its type is struct variable_def).
   The element is reference-counted; it is only returned to its pool
   once the last reference is dropped.  */

static void
variable_htab_free (void *elem)
{
  int i;
  variable var = (variable) elem;
  location_chain node, next;

  gcc_checking_assert (var->refcount > 0);

  var->refcount--;
  /* Other holders remain; keep the variable alive.  */
  if (var->refcount > 0)
    return;

  /* Release every location chain node of every variable part.  */
  for (i = 0; i < var->n_var_parts; i++)
    {
      for (node = var->var_part[i].loc_chain; node; node = next)
	{
	  next = node->next;
	  pool_free (loc_chain_pool, node);
	}
      var->var_part[i].loc_chain = NULL;
    }
  /* One-part variables may carry auxiliary dependency data.  */
  if (var->onepart && VAR_LOC_1PAUX (var))
    {
      loc_exp_dep_clear (var);
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = NULL;
      XDELETE (VAR_LOC_1PAUX (var));
      /* These may be reused across functions, so reset
	 e.g. NO_LOC_P.  */
      if (var->onepart == ONEPART_DEXPR)
	set_dv_changed (var->dv, true);
    }
  /* Return the variable itself to the pool matching its kind.  */
  pool_free (onepart_pool (var->onepart), var);
}
|
|
|
/* Initialize the set (array) SET of attrs to empty lists. */
|
/* Initialize the set (array) SET of attrs to empty lists. */
|
|
|
static void
|
static void
|
init_attrs_list_set (attrs *set)
|
init_attrs_list_set (attrs *set)
|
{
|
{
|
int i;
|
int i;
|
|
|
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
|
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
|
set[i] = NULL;
|
set[i] = NULL;
|
}
|
}
|
|
|
/* Make the list *LISTP empty. */
|
/* Make the list *LISTP empty. */
|
|
|
static void
|
static void
|
attrs_list_clear (attrs *listp)
|
attrs_list_clear (attrs *listp)
|
{
|
{
|
attrs list, next;
|
attrs list, next;
|
|
|
for (list = *listp; list; list = next)
|
for (list = *listp; list; list = next)
|
{
|
{
|
next = list->next;
|
next = list->next;
|
pool_free (attrs_pool, list);
|
pool_free (attrs_pool, list);
|
}
|
}
|
*listp = NULL;
|
*listp = NULL;
|
}
|
}
|
|
|
/* Return true if the pair of DECL and OFFSET is the member of the LIST. */
|
/* Return true if the pair of DECL and OFFSET is the member of the LIST. */
|
|
|
static attrs
|
static attrs
|
attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
|
attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
|
{
|
{
|
for (; list; list = list->next)
|
for (; list; list = list->next)
|
if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
|
if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
|
return list;
|
return list;
|
return NULL;
|
return NULL;
|
}
|
}
|
|
|
/* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
|
/* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
|
|
|
static void
|
static void
|
attrs_list_insert (attrs *listp, decl_or_value dv,
|
attrs_list_insert (attrs *listp, decl_or_value dv,
|
HOST_WIDE_INT offset, rtx loc)
|
HOST_WIDE_INT offset, rtx loc)
|
{
|
{
|
attrs list;
|
attrs list;
|
|
|
list = (attrs) pool_alloc (attrs_pool);
|
list = (attrs) pool_alloc (attrs_pool);
|
list->loc = loc;
|
list->loc = loc;
|
list->dv = dv;
|
list->dv = dv;
|
list->offset = offset;
|
list->offset = offset;
|
list->next = *listp;
|
list->next = *listp;
|
*listp = list;
|
*listp = list;
|
}
|
}
|
|
|
/* Copy all nodes from SRC and create a list *DSTP of the copies. */
|
/* Copy all nodes from SRC and create a list *DSTP of the copies. */
|
|
|
static void
|
static void
|
attrs_list_copy (attrs *dstp, attrs src)
|
attrs_list_copy (attrs *dstp, attrs src)
|
{
|
{
|
attrs n;
|
attrs n;
|
|
|
attrs_list_clear (dstp);
|
attrs_list_clear (dstp);
|
for (; src; src = src->next)
|
for (; src; src = src->next)
|
{
|
{
|
n = (attrs) pool_alloc (attrs_pool);
|
n = (attrs) pool_alloc (attrs_pool);
|
n->loc = src->loc;
|
n->loc = src->loc;
|
n->dv = src->dv;
|
n->dv = src->dv;
|
n->offset = src->offset;
|
n->offset = src->offset;
|
n->next = *dstp;
|
n->next = *dstp;
|
*dstp = n;
|
*dstp = n;
|
}
|
}
|
}
|
}
|
|
|
/* Add all nodes from SRC which are not in *DSTP to *DSTP. */
|
/* Add all nodes from SRC which are not in *DSTP to *DSTP. */
|
|
|
static void
|
static void
|
attrs_list_union (attrs *dstp, attrs src)
|
attrs_list_union (attrs *dstp, attrs src)
|
{
|
{
|
for (; src; src = src->next)
|
for (; src; src = src->next)
|
{
|
{
|
if (!attrs_list_member (*dstp, src->dv, src->offset))
|
if (!attrs_list_member (*dstp, src->dv, src->offset))
|
attrs_list_insert (dstp, src->dv, src->offset, src->loc);
|
attrs_list_insert (dstp, src->dv, src->offset, src->loc);
|
}
|
}
|
}
|
}
|
|
|
/* Combine nodes that are not onepart nodes from SRC and SRC2 into
|
/* Combine nodes that are not onepart nodes from SRC and SRC2 into
|
*DSTP. */
|
*DSTP. */
|
|
|
static void
|
static void
|
attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
|
attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
|
{
|
{
|
gcc_assert (!*dstp);
|
gcc_assert (!*dstp);
|
for (; src; src = src->next)
|
for (; src; src = src->next)
|
{
|
{
|
if (!dv_onepart_p (src->dv))
|
if (!dv_onepart_p (src->dv))
|
attrs_list_insert (dstp, src->dv, src->offset, src->loc);
|
attrs_list_insert (dstp, src->dv, src->offset, src->loc);
|
}
|
}
|
for (src = src2; src; src = src->next)
|
for (src = src2; src; src = src->next)
|
{
|
{
|
if (!dv_onepart_p (src->dv)
|
if (!dv_onepart_p (src->dv)
|
&& !attrs_list_member (*dstp, src->dv, src->offset))
|
&& !attrs_list_member (*dstp, src->dv, src->offset))
|
attrs_list_insert (dstp, src->dv, src->offset, src->loc);
|
attrs_list_insert (dstp, src->dv, src->offset, src->loc);
|
}
|
}
|
}
|
}
|
|
|
/* Shared hashtable support.  */

/* Return true if VARS is shared.  */

static inline bool
shared_hash_shared (shared_hash vars)
{
  /* A refcount above one means another dataflow set still points at
     this hash table.  */
  return vars->refcount > 1;
}
|
|
|
/* Return the hash table for VARS.  */

static inline htab_t
shared_hash_htab (shared_hash vars)
{
  return vars->htab;
}
|
|
|
/* Return true if VAR is shared, or maybe because VARS is shared. */
|
/* Return true if VAR is shared, or maybe because VARS is shared. */
|
|
|
static inline bool
|
static inline bool
|
shared_var_p (variable var, shared_hash vars)
|
shared_var_p (variable var, shared_hash vars)
|
{
|
{
|
/* Don't count an entry in the changed_variables table as a duplicate. */
|
/* Don't count an entry in the changed_variables table as a duplicate. */
|
return ((var->refcount > 1 + (int) var->in_changed_variables)
|
return ((var->refcount > 1 + (int) var->in_changed_variables)
|
|| shared_hash_shared (vars));
|
|| shared_hash_shared (vars));
|
}
|
}
|
|
|
/* Copy variables into a new hash table. */
|
/* Copy variables into a new hash table. */
|
|
|
static shared_hash
|
static shared_hash
|
shared_hash_unshare (shared_hash vars)
|
shared_hash_unshare (shared_hash vars)
|
{
|
{
|
shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
|
shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
|
gcc_assert (vars->refcount > 1);
|
gcc_assert (vars->refcount > 1);
|
new_vars->refcount = 1;
|
new_vars->refcount = 1;
|
new_vars->htab
|
new_vars->htab
|
= htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
|
= htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
|
variable_htab_eq, variable_htab_free);
|
variable_htab_eq, variable_htab_free);
|
vars_copy (new_vars->htab, vars->htab);
|
vars_copy (new_vars->htab, vars->htab);
|
vars->refcount--;
|
vars->refcount--;
|
return new_vars;
|
return new_vars;
|
}
|
}
|
|
|
/* Increment reference counter on VARS and return it.  Sharing is by
   reference; no table contents are copied here.  */

static inline shared_hash
shared_hash_copy (shared_hash vars)
{
  vars->refcount++;
  return vars;
}
|
|
|
/* Decrement reference counter and destroy hash table if not shared
|
/* Decrement reference counter and destroy hash table if not shared
|
anymore. */
|
anymore. */
|
|
|
static void
|
static void
|
shared_hash_destroy (shared_hash vars)
|
shared_hash_destroy (shared_hash vars)
|
{
|
{
|
gcc_checking_assert (vars->refcount > 0);
|
gcc_checking_assert (vars->refcount > 0);
|
if (--vars->refcount == 0)
|
if (--vars->refcount == 0)
|
{
|
{
|
htab_delete (vars->htab);
|
htab_delete (vars->htab);
|
pool_free (shared_hash_pool, vars);
|
pool_free (shared_hash_pool, vars);
|
}
|
}
|
}
|
}
|
|
|
/* Unshare *PVARS if shared and return slot for DV.  If INS is
   INSERT, insert it if not already present.  DVHASH is DV's
   precomputed hash value.  */

static inline void **
shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
				 hashval_t dvhash, enum insert_option ins)
{
  /* Copy-on-write: take a private copy before handing out a slot
     that may be written through.  */
  if (shared_hash_shared (*pvars))
    *pvars = shared_hash_unshare (*pvars);
  return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
}
|
|
|
/* Wrapper around shared_hash_find_slot_unshare_1 that computes DV's
   hash itself.  */
static inline void **
shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
			       enum insert_option ins)
{
  return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
}
|
|
|
/* Return slot for DV, if it is already present in the hash table.
|
/* Return slot for DV, if it is already present in the hash table.
|
If it is not present, insert it only VARS is not shared, otherwise
|
If it is not present, insert it only VARS is not shared, otherwise
|
return NULL. */
|
return NULL. */
|
|
|
static inline void **
|
static inline void **
|
shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
|
shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
|
{
|
{
|
return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
|
return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
|
shared_hash_shared (vars)
|
shared_hash_shared (vars)
|
? NO_INSERT : INSERT);
|
? NO_INSERT : INSERT);
|
}
|
}
|
|
|
/* Wrapper around shared_hash_find_slot_1 that computes DV's hash
   itself.  */
static inline void **
shared_hash_find_slot (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
}
|
|
|
/* Return slot for DV only if it is already present in the hash table.
   DVHASH is DV's precomputed hash value.  */

static inline void **
shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
				  hashval_t dvhash)
{
  return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
				   NO_INSERT);
}
|
|
|
/* Wrapper around shared_hash_find_slot_noinsert_1 that computes DV's
   hash itself.  */
static inline void **
shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
}
|
|
|
/* Return variable for DV or NULL if not already present in the hash
   table.  DVHASH is DV's precomputed hash value.  */

static inline variable
shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
}
|
|
|
/* Wrapper around shared_hash_find_1 that computes DV's hash
   itself.  */
static inline variable
shared_hash_find (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
}
|
|
|
/* Return true if TVAL is better than CVAL as a canonival value. We
|
/* Return true if TVAL is better than CVAL as a canonival value. We
|
choose lowest-numbered VALUEs, using the RTX address as a
|
choose lowest-numbered VALUEs, using the RTX address as a
|
tie-breaker. The idea is to arrange them into a star topology,
|
tie-breaker. The idea is to arrange them into a star topology,
|
such that all of them are at most one step away from the canonical
|
such that all of them are at most one step away from the canonical
|
value, and the canonical value has backlinks to all of them, in
|
value, and the canonical value has backlinks to all of them, in
|
addition to all the actual locations. We don't enforce this
|
addition to all the actual locations. We don't enforce this
|
topology throughout the entire dataflow analysis, though.
|
topology throughout the entire dataflow analysis, though.
|
*/
|
*/
|
|
|
static inline bool
|
static inline bool
|
canon_value_cmp (rtx tval, rtx cval)
|
canon_value_cmp (rtx tval, rtx cval)
|
{
|
{
|
return !cval
|
return !cval
|
|| CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
|
|| CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
|
}
|
}
|
|
|
/* Cleared by unshare_variable when a variable had to be copied;
   NOTE(review): presumably consulted by union/merge code outside this
   chunk to decide whether DST may keep sharing SRC's tables.  */
static bool dst_can_be_shared;
|
|
|
/* Return a copy of a variable VAR and insert it to dataflow set SET. */
|
/* Return a copy of a variable VAR and insert it to dataflow set SET. */
|
|
|
static void **
|
static void **
|
unshare_variable (dataflow_set *set, void **slot, variable var,
|
unshare_variable (dataflow_set *set, void **slot, variable var,
|
enum var_init_status initialized)
|
enum var_init_status initialized)
|
{
|
{
|
variable new_var;
|
variable new_var;
|
int i;
|
int i;
|
|
|
new_var = (variable) pool_alloc (onepart_pool (var->onepart));
|
new_var = (variable) pool_alloc (onepart_pool (var->onepart));
|
new_var->dv = var->dv;
|
new_var->dv = var->dv;
|
new_var->refcount = 1;
|
new_var->refcount = 1;
|
var->refcount--;
|
var->refcount--;
|
new_var->n_var_parts = var->n_var_parts;
|
new_var->n_var_parts = var->n_var_parts;
|
new_var->onepart = var->onepart;
|
new_var->onepart = var->onepart;
|
new_var->in_changed_variables = false;
|
new_var->in_changed_variables = false;
|
|
|
if (! flag_var_tracking_uninit)
|
if (! flag_var_tracking_uninit)
|
initialized = VAR_INIT_STATUS_INITIALIZED;
|
initialized = VAR_INIT_STATUS_INITIALIZED;
|
|
|
for (i = 0; i < var->n_var_parts; i++)
|
for (i = 0; i < var->n_var_parts; i++)
|
{
|
{
|
location_chain node;
|
location_chain node;
|
location_chain *nextp;
|
location_chain *nextp;
|
|
|
if (i == 0 && var->onepart)
|
if (i == 0 && var->onepart)
|
{
|
{
|
/* One-part auxiliary data is only used while emitting
|
/* One-part auxiliary data is only used while emitting
|
notes, so propagate it to the new variable in the active
|
notes, so propagate it to the new variable in the active
|
dataflow set. If we're not emitting notes, this will be
|
dataflow set. If we're not emitting notes, this will be
|
a no-op. */
|
a no-op. */
|
gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
|
gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
|
VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
|
VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
|
VAR_LOC_1PAUX (var) = NULL;
|
VAR_LOC_1PAUX (var) = NULL;
|
}
|
}
|
else
|
else
|
VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
|
VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
|
nextp = &new_var->var_part[i].loc_chain;
|
nextp = &new_var->var_part[i].loc_chain;
|
for (node = var->var_part[i].loc_chain; node; node = node->next)
|
for (node = var->var_part[i].loc_chain; node; node = node->next)
|
{
|
{
|
location_chain new_lc;
|
location_chain new_lc;
|
|
|
new_lc = (location_chain) pool_alloc (loc_chain_pool);
|
new_lc = (location_chain) pool_alloc (loc_chain_pool);
|
new_lc->next = NULL;
|
new_lc->next = NULL;
|
if (node->init > initialized)
|
if (node->init > initialized)
|
new_lc->init = node->init;
|
new_lc->init = node->init;
|
else
|
else
|
new_lc->init = initialized;
|
new_lc->init = initialized;
|
if (node->set_src && !(MEM_P (node->set_src)))
|
if (node->set_src && !(MEM_P (node->set_src)))
|
new_lc->set_src = node->set_src;
|
new_lc->set_src = node->set_src;
|
else
|
else
|
new_lc->set_src = NULL;
|
new_lc->set_src = NULL;
|
new_lc->loc = node->loc;
|
new_lc->loc = node->loc;
|
|
|
*nextp = new_lc;
|
*nextp = new_lc;
|
nextp = &new_lc->next;
|
nextp = &new_lc->next;
|
}
|
}
|
|
|
new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
|
new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
|
}
|
}
|
|
|
dst_can_be_shared = false;
|
dst_can_be_shared = false;
|
if (shared_hash_shared (set->vars))
|
if (shared_hash_shared (set->vars))
|
slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
|
slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
|
else if (set->traversed_vars && set->vars != set->traversed_vars)
|
else if (set->traversed_vars && set->vars != set->traversed_vars)
|
slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
|
slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
|
*slot = new_var;
|
*slot = new_var;
|
if (var->in_changed_variables)
|
if (var->in_changed_variables)
|
{
|
{
|
void **cslot
|
void **cslot
|
= htab_find_slot_with_hash (changed_variables, var->dv,
|
= htab_find_slot_with_hash (changed_variables, var->dv,
|
dv_htab_hash (var->dv), NO_INSERT);
|
dv_htab_hash (var->dv), NO_INSERT);
|
gcc_assert (*cslot == (void *) var);
|
gcc_assert (*cslot == (void *) var);
|
var->in_changed_variables = false;
|
var->in_changed_variables = false;
|
variable_htab_free (var);
|
variable_htab_free (var);
|
*cslot = new_var;
|
*cslot = new_var;
|
new_var->in_changed_variables = true;
|
new_var->in_changed_variables = true;
|
}
|
}
|
return slot;
|
return slot;
|
}
|
}
|
|
|
/* Copy all variables from hash table SRC to hash table DST. */
|
/* Copy all variables from hash table SRC to hash table DST. */
|
|
|
static void
|
static void
|
vars_copy (htab_t dst, htab_t src)
|
vars_copy (htab_t dst, htab_t src)
|
{
|
{
|
htab_iterator hi;
|
htab_iterator hi;
|
variable var;
|
variable var;
|
|
|
FOR_EACH_HTAB_ELEMENT (src, var, variable, hi)
|
FOR_EACH_HTAB_ELEMENT (src, var, variable, hi)
|
{
|
{
|
void **dstp;
|
void **dstp;
|
var->refcount++;
|
var->refcount++;
|
dstp = htab_find_slot_with_hash (dst, var->dv,
|
dstp = htab_find_slot_with_hash (dst, var->dv,
|
dv_htab_hash (var->dv),
|
dv_htab_hash (var->dv),
|
INSERT);
|
INSERT);
|
*dstp = var;
|
*dstp = var;
|
}
|
}
|
}
|
}
|
|
|
/* Map a decl to its main debug decl. */
|
/* Map a decl to its main debug decl. */
|
|
|
static inline tree
|
static inline tree
|
var_debug_decl (tree decl)
|
var_debug_decl (tree decl)
|
{
|
{
|
if (decl && DECL_P (decl)
|
if (decl && DECL_P (decl)
|
&& DECL_DEBUG_EXPR_IS_FROM (decl))
|
&& DECL_DEBUG_EXPR_IS_FROM (decl))
|
{
|
{
|
tree debugdecl = DECL_DEBUG_EXPR (decl);
|
tree debugdecl = DECL_DEBUG_EXPR (decl);
|
if (debugdecl && DECL_P (debugdecl))
|
if (debugdecl && DECL_P (debugdecl))
|
decl = debugdecl;
|
decl = debugdecl;
|
}
|
}
|
|
|
return decl;
|
return decl;
|
}
|
}
|
|
|
/* Set the register LOC to contain DV, OFFSET. */
|
/* Set the register LOC to contain DV, OFFSET. */
|
|
|
static void
|
static void
|
var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
|
var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
|
decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
|
decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
|
enum insert_option iopt)
|
enum insert_option iopt)
|
{
|
{
|
attrs node;
|
attrs node;
|
bool decl_p = dv_is_decl_p (dv);
|
bool decl_p = dv_is_decl_p (dv);
|
|
|
if (decl_p)
|
if (decl_p)
|
dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
|
dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
|
|
|
for (node = set->regs[REGNO (loc)]; node; node = node->next)
|
for (node = set->regs[REGNO (loc)]; node; node = node->next)
|
if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
|
if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
|
&& node->offset == offset)
|
&& node->offset == offset)
|
break;
|
break;
|
if (!node)
|
if (!node)
|
attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
|
attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
|
set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
|
set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
|
}
|
}
|
|
|
/* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
|
/* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
|
|
|
static void
|
static void
|
var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
|
var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
|
rtx set_src)
|
rtx set_src)
|
{
|
{
|
tree decl = REG_EXPR (loc);
|
tree decl = REG_EXPR (loc);
|
HOST_WIDE_INT offset = REG_OFFSET (loc);
|
HOST_WIDE_INT offset = REG_OFFSET (loc);
|
|
|
var_reg_decl_set (set, loc, initialized,
|
var_reg_decl_set (set, loc, initialized,
|
dv_from_decl (decl), offset, set_src, INSERT);
|
dv_from_decl (decl), offset, set_src, INSERT);
|
}
|
}
|
|
|
static enum var_init_status
|
static enum var_init_status
|
get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
|
get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
|
{
|
{
|
variable var;
|
variable var;
|
int i;
|
int i;
|
enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
|
enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
|
|
|
if (! flag_var_tracking_uninit)
|
if (! flag_var_tracking_uninit)
|
return VAR_INIT_STATUS_INITIALIZED;
|
return VAR_INIT_STATUS_INITIALIZED;
|
|
|
var = shared_hash_find (set->vars, dv);
|
var = shared_hash_find (set->vars, dv);
|
if (var)
|
if (var)
|
{
|
{
|
for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
|
for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
|
{
|
{
|
location_chain nextp;
|
location_chain nextp;
|
for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
|
for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
|
if (rtx_equal_p (nextp->loc, loc))
|
if (rtx_equal_p (nextp->loc, loc))
|
{
|
{
|
ret_val = nextp->init;
|
ret_val = nextp->init;
|
break;
|
break;
|
}
|
}
|
}
|
}
|
}
|
}
|
|
|
return ret_val;
|
return ret_val;
|
}
|
}
|
|
|
/* Delete current content of register LOC in dataflow set SET and set
|
/* Delete current content of register LOC in dataflow set SET and set
|
the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
|
the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
|
MODIFY is true, any other live copies of the same variable part are
|
MODIFY is true, any other live copies of the same variable part are
|
also deleted from the dataflow set, otherwise the variable part is
|
also deleted from the dataflow set, otherwise the variable part is
|
assumed to be copied from another location holding the same
|
assumed to be copied from another location holding the same
|
part. */
|
part. */
|
|
|
static void
|
static void
|
var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
|
var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
|
enum var_init_status initialized, rtx set_src)
|
enum var_init_status initialized, rtx set_src)
|
{
|
{
|
tree decl = REG_EXPR (loc);
|
tree decl = REG_EXPR (loc);
|
HOST_WIDE_INT offset = REG_OFFSET (loc);
|
HOST_WIDE_INT offset = REG_OFFSET (loc);
|
attrs node, next;
|
attrs node, next;
|
attrs *nextp;
|
attrs *nextp;
|
|
|
decl = var_debug_decl (decl);
|
decl = var_debug_decl (decl);
|
|
|
if (initialized == VAR_INIT_STATUS_UNKNOWN)
|
if (initialized == VAR_INIT_STATUS_UNKNOWN)
|
initialized = get_init_value (set, loc, dv_from_decl (decl));
|
initialized = get_init_value (set, loc, dv_from_decl (decl));
|
|
|
nextp = &set->regs[REGNO (loc)];
|
nextp = &set->regs[REGNO (loc)];
|
for (node = *nextp; node; node = next)
|
for (node = *nextp; node; node = next)
|
{
|
{
|
next = node->next;
|
next = node->next;
|
if (dv_as_opaque (node->dv) != decl || node->offset != offset)
|
if (dv_as_opaque (node->dv) != decl || node->offset != offset)
|
{
|
{
|
delete_variable_part (set, node->loc, node->dv, node->offset);
|
delete_variable_part (set, node->loc, node->dv, node->offset);
|
pool_free (attrs_pool, node);
|
pool_free (attrs_pool, node);
|
*nextp = next;
|
*nextp = next;
|
}
|
}
|
else
|
else
|
{
|
{
|
node->loc = loc;
|
node->loc = loc;
|
nextp = &node->next;
|
nextp = &node->next;
|
}
|
}
|
}
|
}
|
if (modify)
|
if (modify)
|
clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
|
clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
|
var_reg_set (set, loc, initialized, set_src);
|
var_reg_set (set, loc, initialized, set_src);
|
}
|
}
|
|
|
/* Delete the association of register LOC in dataflow set SET with any
|
/* Delete the association of register LOC in dataflow set SET with any
|
variables that aren't onepart. If CLOBBER is true, also delete any
|
variables that aren't onepart. If CLOBBER is true, also delete any
|
other live copies of the same variable part, and delete the
|
other live copies of the same variable part, and delete the
|
association with onepart dvs too. */
|
association with onepart dvs too. */
|
|
|
static void
|
static void
|
var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
|
var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
|
{
|
{
|
attrs *nextp = &set->regs[REGNO (loc)];
|
attrs *nextp = &set->regs[REGNO (loc)];
|
attrs node, next;
|
attrs node, next;
|
|
|
if (clobber)
|
if (clobber)
|
{
|
{
|
tree decl = REG_EXPR (loc);
|
tree decl = REG_EXPR (loc);
|
HOST_WIDE_INT offset = REG_OFFSET (loc);
|
HOST_WIDE_INT offset = REG_OFFSET (loc);
|
|
|
decl = var_debug_decl (decl);
|
decl = var_debug_decl (decl);
|
|
|
clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
|
clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
|
}
|
}
|
|
|
for (node = *nextp; node; node = next)
|
for (node = *nextp; node; node = next)
|
{
|
{
|
next = node->next;
|
next = node->next;
|
if (clobber || !dv_onepart_p (node->dv))
|
if (clobber || !dv_onepart_p (node->dv))
|
{
|
{
|
delete_variable_part (set, node->loc, node->dv, node->offset);
|
delete_variable_part (set, node->loc, node->dv, node->offset);
|
pool_free (attrs_pool, node);
|
pool_free (attrs_pool, node);
|
*nextp = next;
|
*nextp = next;
|
}
|
}
|
else
|
else
|
nextp = &node->next;
|
nextp = &node->next;
|
}
|
}
|
}
|
}
|
|
|
/* Delete content of register with number REGNO in dataflow set SET. */
|
/* Delete content of register with number REGNO in dataflow set SET. */
|
|
|
static void
|
static void
|
var_regno_delete (dataflow_set *set, int regno)
|
var_regno_delete (dataflow_set *set, int regno)
|
{
|
{
|
attrs *reg = &set->regs[regno];
|
attrs *reg = &set->regs[regno];
|
attrs node, next;
|
attrs node, next;
|
|
|
for (node = *reg; node; node = next)
|
for (node = *reg; node; node = next)
|
{
|
{
|
next = node->next;
|
next = node->next;
|
delete_variable_part (set, node->loc, node->dv, node->offset);
|
delete_variable_part (set, node->loc, node->dv, node->offset);
|
pool_free (attrs_pool, node);
|
pool_free (attrs_pool, node);
|
}
|
}
|
*reg = NULL;
|
*reg = NULL;
|
}
|
}
|
|
|
/* Set the location of DV, OFFSET as the MEM LOC. */
|
/* Set the location of DV, OFFSET as the MEM LOC. */
|
|
|
static void
|
static void
|
var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
|
var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
|
decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
|
decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
|
enum insert_option iopt)
|
enum insert_option iopt)
|
{
|
{
|
if (dv_is_decl_p (dv))
|
if (dv_is_decl_p (dv))
|
dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
|
dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
|
|
|
set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
|
set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
|
}
|
}
|
|
|
/* Set the location part of variable MEM_EXPR (LOC) in dataflow set
|
/* Set the location part of variable MEM_EXPR (LOC) in dataflow set
|
SET to LOC.
|
SET to LOC.
|
Adjust the address first if it is stack pointer based. */
|
Adjust the address first if it is stack pointer based. */
|
|
|
static void
|
static void
|
var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
|
var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
|
rtx set_src)
|
rtx set_src)
|
{
|
{
|
tree decl = MEM_EXPR (loc);
|
tree decl = MEM_EXPR (loc);
|
HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
|
HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
|
|
|
var_mem_decl_set (set, loc, initialized,
|
var_mem_decl_set (set, loc, initialized,
|
dv_from_decl (decl), offset, set_src, INSERT);
|
dv_from_decl (decl), offset, set_src, INSERT);
|
}
|
}
|
|
|
/* Delete and set the location part of variable MEM_EXPR (LOC) in
|
/* Delete and set the location part of variable MEM_EXPR (LOC) in
|
dataflow set SET to LOC. If MODIFY is true, any other live copies
|
dataflow set SET to LOC. If MODIFY is true, any other live copies
|
of the same variable part are also deleted from the dataflow set,
|
of the same variable part are also deleted from the dataflow set,
|
otherwise the variable part is assumed to be copied from another
|
otherwise the variable part is assumed to be copied from another
|
location holding the same part.
|
location holding the same part.
|
Adjust the address first if it is stack pointer based. */
|
Adjust the address first if it is stack pointer based. */
|
|
|
static void
|
static void
|
var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
|
var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
|
enum var_init_status initialized, rtx set_src)
|
enum var_init_status initialized, rtx set_src)
|
{
|
{
|
tree decl = MEM_EXPR (loc);
|
tree decl = MEM_EXPR (loc);
|
HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
|
HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
|
|
|
decl = var_debug_decl (decl);
|
decl = var_debug_decl (decl);
|
|
|
if (initialized == VAR_INIT_STATUS_UNKNOWN)
|
if (initialized == VAR_INIT_STATUS_UNKNOWN)
|
initialized = get_init_value (set, loc, dv_from_decl (decl));
|
initialized = get_init_value (set, loc, dv_from_decl (decl));
|
|
|
if (modify)
|
if (modify)
|
clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
|
clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
|
var_mem_set (set, loc, initialized, set_src);
|
var_mem_set (set, loc, initialized, set_src);
|
}
|
}
|
|
|
/* Delete the location part LOC from dataflow set SET. If CLOBBER is
|
/* Delete the location part LOC from dataflow set SET. If CLOBBER is
|
true, also delete any other live copies of the same variable part.
|
true, also delete any other live copies of the same variable part.
|
Adjust the address first if it is stack pointer based. */
|
Adjust the address first if it is stack pointer based. */
|
|
|
static void
|
static void
|
var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
|
var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
|
{
|
{
|
tree decl = MEM_EXPR (loc);
|
tree decl = MEM_EXPR (loc);
|
HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
|
HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
|
|
|
decl = var_debug_decl (decl);
|
decl = var_debug_decl (decl);
|
if (clobber)
|
if (clobber)
|
clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
|
clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
|
delete_variable_part (set, loc, dv_from_decl (decl), offset);
|
delete_variable_part (set, loc, dv_from_decl (decl), offset);
|
}
|
}
|
|
|
/* Return true if LOC should not be expanded for location expressions,
|
/* Return true if LOC should not be expanded for location expressions,
|
or used in them. */
|
or used in them. */
|
|
|
static inline bool
|
static inline bool
|
unsuitable_loc (rtx loc)
|
unsuitable_loc (rtx loc)
|
{
|
{
|
switch (GET_CODE (loc))
|
switch (GET_CODE (loc))
|
{
|
{
|
case PC:
|
case PC:
|
case SCRATCH:
|
case SCRATCH:
|
case CC0:
|
case CC0:
|
case ASM_INPUT:
|
case ASM_INPUT:
|
case ASM_OPERANDS:
|
case ASM_OPERANDS:
|
return true;
|
return true;
|
|
|
default:
|
default:
|
return false;
|
return false;
|
}
|
}
|
}
|
}
|
|
|
/* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
|
/* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
|
bound to it. */
|
bound to it. */
|
|
|
static inline void
|
static inline void
|
val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
|
val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
|
{
|
{
|
if (REG_P (loc))
|
if (REG_P (loc))
|
{
|
{
|
if (modified)
|
if (modified)
|
var_regno_delete (set, REGNO (loc));
|
var_regno_delete (set, REGNO (loc));
|
var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
|
var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
|
dv_from_value (val), 0, NULL_RTX, INSERT);
|
dv_from_value (val), 0, NULL_RTX, INSERT);
|
}
|
}
|
else if (MEM_P (loc))
|
else if (MEM_P (loc))
|
{
|
{
|
struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
|
struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
|
|
|
if (l && GET_CODE (l->loc) == VALUE)
|
if (l && GET_CODE (l->loc) == VALUE)
|
l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
|
l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
|
|
|
/* If this MEM is a global constant, we don't need it in the
|
/* If this MEM is a global constant, we don't need it in the
|
dynamic tables. ??? We should test this before emitting the
|
dynamic tables. ??? We should test this before emitting the
|
micro-op in the first place. */
|
micro-op in the first place. */
|
while (l)
|
while (l)
|
if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
|
if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
|
break;
|
break;
|
else
|
else
|
l = l->next;
|
l = l->next;
|
|
|
if (!l)
|
if (!l)
|
var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
|
var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
|
dv_from_value (val), 0, NULL_RTX, INSERT);
|
dv_from_value (val), 0, NULL_RTX, INSERT);
|
}
|
}
|
else
|
else
|
{
|
{
|
/* Other kinds of equivalences are necessarily static, at least
|
/* Other kinds of equivalences are necessarily static, at least
|
so long as we do not perform substitutions while merging
|
so long as we do not perform substitutions while merging
|
expressions. */
|
expressions. */
|
gcc_unreachable ();
|
gcc_unreachable ();
|
set_variable_part (set, loc, dv_from_value (val), 0,
|
set_variable_part (set, loc, dv_from_value (val), 0,
|
VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
|
VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
|
}
|
}
|
}
|
}
|
|
|
/* Bind a value to a location it was just stored in. If MODIFIED
|
/* Bind a value to a location it was just stored in. If MODIFIED
|
holds, assume the location was modified, detaching it from any
|
holds, assume the location was modified, detaching it from any
|
values bound to it. */
|
values bound to it. */
|
|
|
static void
|
static void
|
val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
|
val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
|
{
|
{
|
cselib_val *v = CSELIB_VAL_PTR (val);
|
cselib_val *v = CSELIB_VAL_PTR (val);
|
|
|
gcc_assert (cselib_preserved_value_p (v));
|
gcc_assert (cselib_preserved_value_p (v));
|
|
|
if (dump_file)
|
if (dump_file)
|
{
|
{
|
fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
|
fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
|
print_inline_rtx (dump_file, loc, 0);
|
print_inline_rtx (dump_file, loc, 0);
|
fprintf (dump_file, " evaluates to ");
|
fprintf (dump_file, " evaluates to ");
|
print_inline_rtx (dump_file, val, 0);
|
print_inline_rtx (dump_file, val, 0);
|
if (v->locs)
|
if (v->locs)
|
{
|
{
|
struct elt_loc_list *l;
|
struct elt_loc_list *l;
|
for (l = v->locs; l; l = l->next)
|
for (l = v->locs; l; l = l->next)
|
{
|
{
|
fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
|
fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
|
print_inline_rtx (dump_file, l->loc, 0);
|
print_inline_rtx (dump_file, l->loc, 0);
|
}
|
}
|
}
|
}
|
fprintf (dump_file, "\n");
|
fprintf (dump_file, "\n");
|
}
|
}
|
|
|
gcc_checking_assert (!unsuitable_loc (loc));
|
gcc_checking_assert (!unsuitable_loc (loc));
|
|
|
val_bind (set, val, loc, modified);
|
val_bind (set, val, loc, modified);
|
}
|
}
|
|
|
/* Reset this node, detaching all its equivalences. Return the slot
|
/* Reset this node, detaching all its equivalences. Return the slot
|
in the variable hash table that holds dv, if there is one. */
|
in the variable hash table that holds dv, if there is one. */
|
|
|
static void
|
static void
|
val_reset (dataflow_set *set, decl_or_value dv)
|
val_reset (dataflow_set *set, decl_or_value dv)
|
{
|
{
|
variable var = shared_hash_find (set->vars, dv) ;
|
variable var = shared_hash_find (set->vars, dv) ;
|
location_chain node;
|
location_chain node;
|
rtx cval;
|
rtx cval;
|
|
|
if (!var || !var->n_var_parts)
|
if (!var || !var->n_var_parts)
|
return;
|
return;
|
|
|
gcc_assert (var->n_var_parts == 1);
|
gcc_assert (var->n_var_parts == 1);
|
|
|
cval = NULL;
|
cval = NULL;
|
for (node = var->var_part[0].loc_chain; node; node = node->next)
|
for (node = var->var_part[0].loc_chain; node; node = node->next)
|
if (GET_CODE (node->loc) == VALUE
|
if (GET_CODE (node->loc) == VALUE
|
&& canon_value_cmp (node->loc, cval))
|
&& canon_value_cmp (node->loc, cval))
|
cval = node->loc;
|
cval = node->loc;
|
|
|
for (node = var->var_part[0].loc_chain; node; node = node->next)
|
for (node = var->var_part[0].loc_chain; node; node = node->next)
|
if (GET_CODE (node->loc) == VALUE && cval != node->loc)
|
if (GET_CODE (node->loc) == VALUE && cval != node->loc)
|
{
|
{
|
/* Redirect the equivalence link to the new canonical
|
/* Redirect the equivalence link to the new canonical
|
value, or simply remove it if it would point at
|
value, or simply remove it if it would point at
|
itself. */
|
itself. */
|
if (cval)
|
if (cval)
|
set_variable_part (set, cval, dv_from_value (node->loc),
|
set_variable_part (set, cval, dv_from_value (node->loc),
|
0, node->init, node->set_src, NO_INSERT);
|
0, node->init, node->set_src, NO_INSERT);
|
delete_variable_part (set, dv_as_value (dv),
|
delete_variable_part (set, dv_as_value (dv),
|
dv_from_value (node->loc), 0);
|
dv_from_value (node->loc), 0);
|
}
|
}
|
|
|
if (cval)
|
if (cval)
|
{
|
{
|
decl_or_value cdv = dv_from_value (cval);
|
decl_or_value cdv = dv_from_value (cval);
|
|
|
/* Keep the remaining values connected, accummulating links
|
/* Keep the remaining values connected, accummulating links
|
in the canonical value. */
|
in the canonical value. */
|
for (node = var->var_part[0].loc_chain; node; node = node->next)
|
for (node = var->var_part[0].loc_chain; node; node = node->next)
|
{
|
{
|
if (node->loc == cval)
|
if (node->loc == cval)
|
continue;
|
continue;
|
else if (GET_CODE (node->loc) == REG)
|
else if (GET_CODE (node->loc) == REG)
|
var_reg_decl_set (set, node->loc, node->init, cdv, 0,
|
var_reg_decl_set (set, node->loc, node->init, cdv, 0,
|
node->set_src, NO_INSERT);
|
node->set_src, NO_INSERT);
|
else if (GET_CODE (node->loc) == MEM)
|
else if (GET_CODE (node->loc) == MEM)
|
var_mem_decl_set (set, node->loc, node->init, cdv, 0,
|
var_mem_decl_set (set, node->loc, node->init, cdv, 0,
|
node->set_src, NO_INSERT);
|
node->set_src, NO_INSERT);
|
else
|
else
|
set_variable_part (set, node->loc, cdv, 0,
|
set_variable_part (set, node->loc, cdv, 0,
|
node->init, node->set_src, NO_INSERT);
|
node->init, node->set_src, NO_INSERT);
|
}
|
}
|
}
|
}
|
|
|
/* We remove this last, to make sure that the canonical value is not
|
/* We remove this last, to make sure that the canonical value is not
|
removed to the point of requiring reinsertion. */
|
removed to the point of requiring reinsertion. */
|
if (cval)
|
if (cval)
|
delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
|
delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
|
|
|
clobber_variable_part (set, NULL, dv, 0, NULL);
|
clobber_variable_part (set, NULL, dv, 0, NULL);
|
}
|
}
|
|
|
/* Find the values in a given location and map the val to another
|
/* Find the values in a given location and map the val to another
|
value, if it is unique, or add the location as one holding the
|
value, if it is unique, or add the location as one holding the
|
value. */
|
value. */
|
|
|
static void
|
static void
|
val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
|
val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
|
{
|
{
|
decl_or_value dv = dv_from_value (val);
|
decl_or_value dv = dv_from_value (val);
|
|
|
if (dump_file && (dump_flags & TDF_DETAILS))
|
if (dump_file && (dump_flags & TDF_DETAILS))
|
{
|
{
|
if (insn)
|
if (insn)
|
fprintf (dump_file, "%i: ", INSN_UID (insn));
|
fprintf (dump_file, "%i: ", INSN_UID (insn));
|
else
|
else
|
fprintf (dump_file, "head: ");
|
fprintf (dump_file, "head: ");
|
print_inline_rtx (dump_file, val, 0);
|
print_inline_rtx (dump_file, val, 0);
|
fputs (" is at ", dump_file);
|
fputs (" is at ", dump_file);
|
print_inline_rtx (dump_file, loc, 0);
|
print_inline_rtx (dump_file, loc, 0);
|
fputc ('\n', dump_file);
|
fputc ('\n', dump_file);
|
}
|
}
|
|
|
val_reset (set, dv);
|
val_reset (set, dv);
|
|
|
gcc_checking_assert (!unsuitable_loc (loc));
|
gcc_checking_assert (!unsuitable_loc (loc));
|
|
|
if (REG_P (loc))
|
if (REG_P (loc))
|
{
|
{
|
attrs node, found = NULL;
|
attrs node, found = NULL;
|
|
|
for (node = set->regs[REGNO (loc)]; node; node = node->next)
|
for (node = set->regs[REGNO (loc)]; node; node = node->next)
|
if (dv_is_value_p (node->dv)
|
if (dv_is_value_p (node->dv)
|
&& GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
|
&& GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
|
{
|
{
|
found = node;
|
found = node;
|
|
|
/* Map incoming equivalences. ??? Wouldn't it be nice if
|
/* Map incoming equivalences. ??? Wouldn't it be nice if
|
we just started sharing the location lists? Maybe a
|
we just started sharing the location lists? Maybe a
|
circular list ending at the value itself or some
|
circular list ending at the value itself or some
|
such. */
|
such. */
|
set_variable_part (set, dv_as_value (node->dv),
|
set_variable_part (set, dv_as_value (node->dv),
|
dv_from_value (val), node->offset,
|
dv_from_value (val), node->offset,
|
VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
|
VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
|
set_variable_part (set, val, node->dv, node->offset,
|
set_variable_part (set, val, node->dv, node->offset,
|
VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
|
VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
|
}
|
}
|
|
|
/* If we didn't find any equivalence, we need to remember that
|
/* If we didn't find any equivalence, we need to remember that
|
this value is held in the named register. */
|
this value is held in the named register. */
|
if (found)
|
if (found)
|
return;
|
return;
|
}
|
}
|
/* ??? Attempt to find and merge equivalent MEMs or other
|
/* ??? Attempt to find and merge equivalent MEMs or other
|
expressions too. */
|
expressions too. */
|
|
|
val_bind (set, val, loc, false);
|
val_bind (set, val, loc, false);
|
}
|
}
|
|
|
/* Initialize dataflow set SET to be empty.
|
/* Initialize dataflow set SET to be empty.
|
VARS_SIZE is the initial size of hash table VARS. */
|
VARS_SIZE is the initial size of hash table VARS. */
|
|
|
static void
|
static void
|
dataflow_set_init (dataflow_set *set)
|
dataflow_set_init (dataflow_set *set)
|
{
|
{
|
init_attrs_list_set (set->regs);
|
init_attrs_list_set (set->regs);
|
set->vars = shared_hash_copy (empty_shared_hash);
|
set->vars = shared_hash_copy (empty_shared_hash);
|
set->stack_adjust = 0;
|
set->stack_adjust = 0;
|
set->traversed_vars = NULL;
|
set->traversed_vars = NULL;
|
}
|
}
|
|
|
/* Delete the contents of dataflow set SET. */
|
/* Delete the contents of dataflow set SET. */
|
|
|
static void
|
static void
|
dataflow_set_clear (dataflow_set *set)
|
dataflow_set_clear (dataflow_set *set)
|
{
|
{
|
int i;
|
int i;
|
|
|
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
|
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
|
attrs_list_clear (&set->regs[i]);
|
attrs_list_clear (&set->regs[i]);
|
|
|
shared_hash_destroy (set->vars);
|
shared_hash_destroy (set->vars);
|
set->vars = shared_hash_copy (empty_shared_hash);
|
set->vars = shared_hash_copy (empty_shared_hash);
|
}
|
}
|
|
|
/* Copy the contents of dataflow set SRC to DST. */
|
/* Copy the contents of dataflow set SRC to DST. */
|
|
|
static void
|
static void
|
dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
|
dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
|
{
|
{
|
int i;
|
int i;
|
|
|
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
|
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
|
attrs_list_copy (&dst->regs[i], src->regs[i]);
|
attrs_list_copy (&dst->regs[i], src->regs[i]);
|
|
|
shared_hash_destroy (dst->vars);
|
shared_hash_destroy (dst->vars);
|
dst->vars = shared_hash_copy (src->vars);
|
dst->vars = shared_hash_copy (src->vars);
|
dst->stack_adjust = src->stack_adjust;
|
dst->stack_adjust = src->stack_adjust;
|
}
|
}
|
|
|
/* Information for merging the lists of locations for a given offset of
   a variable: one record per location node taking part in the "sorted"
   union computed by variable_union.  */
struct variable_union_info
{
  /* Node of the location chain.  */
  location_chain lc;

  /* The sum of positions in the input chains; used as the primary sort
     key (locations missing from one chain get a position past its
     length, pushing them towards the end).  */
  int pos;

  /* The position in the chain of the DST dataflow set; used as the
     tie-breaking secondary sort key.  */
  int pos_dst;
};
|
|
|
/* Buffer for location list sorting and its allocated size; grown on
   demand (never shrunk) when variable_union needs more entries.  */
static struct variable_union_info *vui_vec;
static int vui_allocated;
|
|
|
/* Compare function for qsort, order the structures by POS element. */
|
/* Compare function for qsort, order the structures by POS element. */
|
|
|
static int
|
static int
|
variable_union_info_cmp_pos (const void *n1, const void *n2)
|
variable_union_info_cmp_pos (const void *n1, const void *n2)
|
{
|
{
|
const struct variable_union_info *const i1 =
|
const struct variable_union_info *const i1 =
|
(const struct variable_union_info *) n1;
|
(const struct variable_union_info *) n1;
|
const struct variable_union_info *const i2 =
|
const struct variable_union_info *const i2 =
|
( const struct variable_union_info *) n2;
|
( const struct variable_union_info *) n2;
|
|
|
if (i1->pos != i2->pos)
|
if (i1->pos != i2->pos)
|
return i1->pos - i2->pos;
|
return i1->pos - i2->pos;
|
|
|
return (i1->pos_dst - i2->pos_dst);
|
return (i1->pos_dst - i2->pos_dst);
|
}
|
}
|
|
|
/* Compute union of location parts of variable *SLOT and the same variable
   from hash table DATA.  Compute "sorted" union of the location chains
   for common offsets, i.e. the locations of a variable part are sorted by
   a priority where the priority is the sum of the positions in the 2 chains
   (if a location is only in one list the position in the second list is
   defined to be larger than the length of the chains).
   When we are updating the location parts the newest location is in the
   beginning of the chain, so when we do the described "sorted" union
   we keep the newest locations in the beginning.  Returns 1 so the
   hash-table traversal driving it continues.  */

static int
variable_union (variable src, dataflow_set *set)
{
  variable dst;
  void **dstp;
  int i, j, k;

  dstp = shared_hash_find_slot (set->vars, src->dv);
  if (!dstp || !*dstp)
    {
      /* SRC's dv is not in SET yet: insert the very same variable
	 structure, bumping its refcount since it is now shared.  */
      src->refcount++;

      dst_can_be_shared = false;
      if (!dstp)
	dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);

      *dstp = src;

      /* Continue traversing the hash table.  */
      return 1;
    }
  else
    dst = (variable) *dstp;

  gcc_assert (src->n_var_parts);
  gcc_checking_assert (src->onepart == dst->onepart);

  /* We can combine one-part variables very efficiently, because their
     entries are in canonical order.  */
  if (src->onepart)
    {
      location_chain *nodep, dnode, snode;

      gcc_assert (src->n_var_parts == 1
		  && dst->n_var_parts == 1);

      snode = src->var_part[0].loc_chain;
      gcc_assert (snode);

      /* Merge the two loc_cmp-ordered chains in a single pass,
	 inserting into DST any SRC location it is missing.  */
    restart_onepart_unshared:
      nodep = &dst->var_part[0].loc_chain;
      dnode = *nodep;
      gcc_assert (dnode);

      while (snode)
	{
	  int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;

	  if (r > 0)
	    {
	      location_chain nnode;

	      /* SNODE's loc is missing from DST; DST must be private
		 before we may modify its chain.  */
	      if (shared_var_p (dst, set->vars))
		{
		  dstp = unshare_variable (set, dstp, dst,
					   VAR_INIT_STATUS_INITIALIZED);
		  dst = (variable)*dstp;
		  goto restart_onepart_unshared;
		}

	      *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
	      nnode->loc = snode->loc;
	      nnode->init = snode->init;
	      /* set_src coming from a MEM is not propagated.  */
	      if (!snode->set_src || MEM_P (snode->set_src))
		nnode->set_src = NULL;
	      else
		nnode->set_src = snode->set_src;
	      nnode->next = dnode;
	      dnode = nnode;
	    }
	  else if (r == 0)
	    gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));

	  if (r >= 0)
	    snode = snode->next;

	  nodep = &dnode->next;
	  dnode = *nodep;
	}

      return 1;
    }

  gcc_checking_assert (!src->onepart);

  /* Count the number of location parts, result is K.  */
  for (i = 0, j = 0, k = 0;
       i < src->n_var_parts && j < dst->n_var_parts; k++)
    {
      if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
	{
	  i++;
	  j++;
	}
      else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
	i++;
      else
	j++;
    }
  k += src->n_var_parts - i;
  k += dst->n_var_parts - j;

  /* We track only variables whose size is <= MAX_VAR_PARTS bytes
     thus there are at most MAX_VAR_PARTS different offsets.  */
  gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);

  if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
    {
      dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
      dst = (variable)*dstp;
    }

  /* Walk both part arrays from the highest offsets down, filling DST's
     var_part array in place from slot K-1 back to 0.  */
  i = src->n_var_parts - 1;
  j = dst->n_var_parts - 1;
  dst->n_var_parts = k;

  for (k--; k >= 0; k--)
    {
      location_chain node, node2;

      if (i >= 0 && j >= 0
	  && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
	{
	  /* Compute the "sorted" union of the chains, i.e. the locations which
	     are in both chains go first, they are sorted by the sum of
	     positions in the chains.  */
	  int dst_l, src_l;
	  int ii, jj, n;
	  struct variable_union_info *vui;

	  /* If DST is shared compare the location chains.
	     If they are different we will modify the chain in DST with
	     high probability so make a copy of DST.  */
	  if (shared_var_p (dst, set->vars))
	    {
	      for (node = src->var_part[i].loc_chain,
		   node2 = dst->var_part[j].loc_chain; node && node2;
		   node = node->next, node2 = node2->next)
		{
		  if (!((REG_P (node2->loc)
			 && REG_P (node->loc)
			 && REGNO (node2->loc) == REGNO (node->loc))
			|| rtx_equal_p (node2->loc, node->loc)))
		    {
		      if (node2->init < node->init)
			node2->init = node->init;
		      break;
		    }
		}
	      if (node || node2)
		{
		  dstp = unshare_variable (set, dstp, dst,
					   VAR_INIT_STATUS_UNKNOWN);
		  dst = (variable)*dstp;
		}
	    }

	  src_l = 0;
	  for (node = src->var_part[i].loc_chain; node; node = node->next)
	    src_l++;
	  dst_l = 0;
	  for (node = dst->var_part[j].loc_chain; node; node = node->next)
	    dst_l++;

	  if (dst_l == 1)
	    {
	      /* The most common case, much simpler, no qsort is needed.  */
	      location_chain dstnode = dst->var_part[j].loc_chain;
	      dst->var_part[k].loc_chain = dstnode;
	      VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET(dst, j);
	      node2 = dstnode;
	      for (node = src->var_part[i].loc_chain; node; node = node->next)
		if (!((REG_P (dstnode->loc)
		       && REG_P (node->loc)
		       && REGNO (dstnode->loc) == REGNO (node->loc))
		      || rtx_equal_p (dstnode->loc, node->loc)))
		  {
		    location_chain new_node;

		    /* Copy the location from SRC.  */
		    new_node = (location_chain) pool_alloc (loc_chain_pool);
		    new_node->loc = node->loc;
		    new_node->init = node->init;
		    if (!node->set_src || MEM_P (node->set_src))
		      new_node->set_src = NULL;
		    else
		      new_node->set_src = node->set_src;
		    node2->next = new_node;
		    node2 = new_node;
		  }
	      node2->next = NULL;
	    }
	  else
	    {
	      /* Grow the scratch buffer if needed; it persists across
		 calls in VUI_VEC.  */
	      if (src_l + dst_l > vui_allocated)
		{
		  vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
		  vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
					vui_allocated);
		}
	      vui = vui_vec;

	      /* Fill in the locations from DST.  */
	      for (node = dst->var_part[j].loc_chain, jj = 0; node;
		   node = node->next, jj++)
		{
		  vui[jj].lc = node;
		  vui[jj].pos_dst = jj;

		  /* Pos plus value larger than a sum of 2 valid positions.  */
		  vui[jj].pos = jj + src_l + dst_l;
		}

	      /* Fill in the locations from SRC.  */
	      n = dst_l;
	      for (node = src->var_part[i].loc_chain, ii = 0; node;
		   node = node->next, ii++)
		{
		  /* Find location from NODE.  */
		  for (jj = 0; jj < dst_l; jj++)
		    {
		      if ((REG_P (vui[jj].lc->loc)
			   && REG_P (node->loc)
			   && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
			  || rtx_equal_p (vui[jj].lc->loc, node->loc))
			{
			  vui[jj].pos = jj + ii;
			  break;
			}
		    }
		  if (jj >= dst_l)	/* The location has not been found.  */
		    {
		      location_chain new_node;

		      /* Copy the location from SRC.  */
		      new_node = (location_chain) pool_alloc (loc_chain_pool);
		      new_node->loc = node->loc;
		      new_node->init = node->init;
		      if (!node->set_src || MEM_P (node->set_src))
			new_node->set_src = NULL;
		      else
			new_node->set_src = node->set_src;
		      vui[n].lc = new_node;
		      vui[n].pos_dst = src_l + dst_l;
		      vui[n].pos = ii + src_l + dst_l;
		      n++;
		    }
		}

	      if (dst_l == 2)
		{
		  /* Special case still very common case.  For dst_l == 2
		     all entries dst_l ... n-1 are sorted, with for i >= dst_l
		     vui[i].pos == i + src_l + dst_l.  */
		  if (vui[0].pos > vui[1].pos)
		    {
		      /* Order should be 1, 0, 2... */
		      dst->var_part[k].loc_chain = vui[1].lc;
		      vui[1].lc->next = vui[0].lc;
		      if (n >= 3)
			{
			  vui[0].lc->next = vui[2].lc;
			  vui[n - 1].lc->next = NULL;
			}
		      else
			vui[0].lc->next = NULL;
		      ii = 3;
		    }
		  else
		    {
		      dst->var_part[k].loc_chain = vui[0].lc;
		      if (n >= 3 && vui[2].pos < vui[1].pos)
			{
			  /* Order should be 0, 2, 1, 3... */
			  vui[0].lc->next = vui[2].lc;
			  vui[2].lc->next = vui[1].lc;
			  if (n >= 4)
			    {
			      vui[1].lc->next = vui[3].lc;
			      vui[n - 1].lc->next = NULL;
			    }
			  else
			    vui[1].lc->next = NULL;
			  ii = 4;
			}
		      else
			{
			  /* Order should be 0, 1, 2... */
			  ii = 1;
			  vui[n - 1].lc->next = NULL;
			}
		    }
		  for (; ii < n; ii++)
		    vui[ii - 1].lc->next = vui[ii].lc;
		}
	      else
		{
		  qsort (vui, n, sizeof (struct variable_union_info),
			 variable_union_info_cmp_pos);

		  /* Reconnect the nodes in sorted order.  */
		  for (ii = 1; ii < n; ii++)
		    vui[ii - 1].lc->next = vui[ii].lc;
		  vui[n - 1].lc->next = NULL;
		  dst->var_part[k].loc_chain = vui[0].lc;
		}

	      VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
	    }
	  i--;
	  j--;
	}
      else if ((i >= 0 && j >= 0
		&& VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
	       || i < 0)
	{
	  /* The offset exists only in DST: keep DST's part as-is.  */
	  dst->var_part[k] = dst->var_part[j];
	  j--;
	}
      else if ((i >= 0 && j >= 0
		&& VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
	       || j < 0)
	{
	  location_chain *nextp;

	  /* Copy the chain from SRC.  */
	  nextp = &dst->var_part[k].loc_chain;
	  for (node = src->var_part[i].loc_chain; node; node = node->next)
	    {
	      location_chain new_lc;

	      new_lc = (location_chain) pool_alloc (loc_chain_pool);
	      new_lc->next = NULL;
	      new_lc->init = node->init;
	      if (!node->set_src || MEM_P (node->set_src))
		new_lc->set_src = NULL;
	      else
		new_lc->set_src = node->set_src;
	      new_lc->loc = node->loc;

	      *nextp = new_lc;
	      nextp = &new_lc->next;
	    }

	  VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
	  i--;
	}
      dst->var_part[k].cur_loc = NULL;
    }

  /* Merge the initialization statuses of locations common to both
     sets, keeping the "more initialized" one.  */
  if (flag_var_tracking_uninit)
    for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
      {
	location_chain node, node2;
	for (node = src->var_part[i].loc_chain; node; node = node->next)
	  for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
	    if (rtx_equal_p (node->loc, node2->loc))
	      {
		if (node->init > node2->init)
		  node2->init = node->init;
	      }
      }

  /* Continue traversing the hash table.  */
  return 1;
}
|
|
|
/* Compute union of dataflow sets SRC and DST and store it to DST. */
|
/* Compute union of dataflow sets SRC and DST and store it to DST. */
|
|
|
static void
|
static void
|
dataflow_set_union (dataflow_set *dst, dataflow_set *src)
|
dataflow_set_union (dataflow_set *dst, dataflow_set *src)
|
{
|
{
|
int i;
|
int i;
|
|
|
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
|
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
|
attrs_list_union (&dst->regs[i], src->regs[i]);
|
attrs_list_union (&dst->regs[i], src->regs[i]);
|
|
|
if (dst->vars == empty_shared_hash)
|
if (dst->vars == empty_shared_hash)
|
{
|
{
|
shared_hash_destroy (dst->vars);
|
shared_hash_destroy (dst->vars);
|
dst->vars = shared_hash_copy (src->vars);
|
dst->vars = shared_hash_copy (src->vars);
|
}
|
}
|
else
|
else
|
{
|
{
|
htab_iterator hi;
|
htab_iterator hi;
|
variable var;
|
variable var;
|
|
|
FOR_EACH_HTAB_ELEMENT (shared_hash_htab (src->vars), var, variable, hi)
|
FOR_EACH_HTAB_ELEMENT (shared_hash_htab (src->vars), var, variable, hi)
|
variable_union (var, dst);
|
variable_union (var, dst);
|
}
|
}
|
}
|
}
|
|
|
/* The following accessors reuse otherwise-unused flag bits of VALUE /
   DEBUG_EXPR rtxes (and TREE_VISITED of decls) as pass-local state.  */

/* Whether the value is currently being expanded; stored in the `used'
   RTL flag bit.  */
#define VALUE_RECURSED_INTO(x) \
  (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)

/* Whether no expansion was found, saving useless lookups.
   It must only be set when VALUE_CHANGED is clear.  Stored in the
   `return_val' RTL flag bit.  */
#define NO_LOC_P(x) \
  (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)

/* Whether cur_loc in the value needs to be (re)computed; stored in the
   `frame_related' RTL flag bit.  */
#define VALUE_CHANGED(x) \
  (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
/* Whether cur_loc in the decl needs to be (re)computed.  */
#define DECL_CHANGED(x) TREE_VISITED (x)
|
|
|
/* Record (if NEWV) that DV needs to have its cur_loc recomputed.  For
   user DECLs, this means they're in changed_variables.  Values and
   debug exprs may be left with this flag set if no user variable
   requires them to be evaluated.  */

static inline void
set_dv_changed (decl_or_value dv, bool newv)
{
  switch (dv_onepart_p (dv))
    {
    case ONEPART_VALUE:
      /* NO_LOC_P must only be set while VALUE_CHANGED is clear, so
	 clear it before marking the value changed.  */
      if (newv)
	NO_LOC_P (dv_as_value (dv)) = false;
      VALUE_CHANGED (dv_as_value (dv)) = newv;
      break;

    case ONEPART_DEXPR:
      /* Debug exprs carry the NO_LOC_P bit on their DECL_RTL; clear it,
	 then fall through to flag the decl itself.  */
      if (newv)
	NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
      /* Fall through...  */

    default:
      DECL_CHANGED (dv_as_decl (dv)) = newv;
      break;
    }
}
|
|
|
/* Return true if DV needs to have its cur_loc recomputed. */
|
/* Return true if DV needs to have its cur_loc recomputed. */
|
|
|
static inline bool
|
static inline bool
|
dv_changed_p (decl_or_value dv)
|
dv_changed_p (decl_or_value dv)
|
{
|
{
|
return (dv_is_value_p (dv)
|
return (dv_is_value_p (dv)
|
? VALUE_CHANGED (dv_as_value (dv))
|
? VALUE_CHANGED (dv_as_value (dv))
|
: DECL_CHANGED (dv_as_decl (dv)));
|
: DECL_CHANGED (dv_as_decl (dv)));
|
}
|
}
|
|
|
/* Return a location list node whose loc is rtx_equal to LOC, in the
   location list of a one-part variable or value VAR, or in that of
   any values recursively mentioned in the location lists.  VARS must
   be in star-canonical form.  Returns NULL when no match is found.  */

static location_chain
find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
{
  location_chain node;
  enum rtx_code loc_code;

  if (!var)
    return NULL;

  gcc_checking_assert (var->onepart);

  if (!var->n_var_parts)
    return NULL;

  gcc_checking_assert (loc != dv_as_opaque (var->dv));

  loc_code = GET_CODE (loc);
  for (node = var->var_part[0].loc_chain; node; node = node->next)
    {
      decl_or_value dv;
      variable rvar;

      if (GET_CODE (node->loc) != loc_code)
	{
	  /* A node of a different code can only help if it is a VALUE
	     we can recurse into (handled below).  */
	  if (GET_CODE (node->loc) != VALUE)
	    continue;
	}
      else if (loc == node->loc)
	return node;
      else if (loc_code != VALUE)
	{
	  if (rtx_equal_p (loc, node->loc))
	    return node;
	  continue;
	}

      /* Since we're in star-canonical form, we don't need to visit
	 non-canonical nodes: one-part variables and non-canonical
	 values would only point back to the canonical node.  */
      if (dv_is_value_p (var->dv)
	  && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
	{
	  /* Skip all subsequent VALUEs.  */
	  while (node->next && GET_CODE (node->next->loc) == VALUE)
	    {
	      node = node->next;
	      gcc_checking_assert (!canon_value_cmp (node->loc,
						     dv_as_value (var->dv)));
	      if (loc == node->loc)
		return node;
	    }
	  continue;
	}

      /* In star-canonical form, a canonical VALUE can only appear as
	 the sole, first node of the chain; recurse into it.  */
      gcc_checking_assert (node == var->var_part[0].loc_chain);
      gcc_checking_assert (!node->next);

      dv = dv_from_value (node->loc);
      rvar = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
      return find_loc_in_1pdv (loc, rvar, vars);
    }

  /* ??? Gotta look in cselib_val locations too.  */

  return NULL;
}
|
|
|
/* Hash table iteration argument passed to variable_merge.  Bundles the
   three dataflow sets involved in a merge plus bookkeeping.  */
struct dfset_merge
{
  /* The set in which the merge is to be inserted.  */
  dataflow_set *dst;
  /* The set that we're iterating in.  */
  dataflow_set *cur;
  /* The set that may contain the other dv we are to merge with.  */
  dataflow_set *src;
  /* Number of onepart dvs in src.  */
  int src_onepart_cnt;
};
|
|
|
/* Insert LOC in *DNODE, if it's not there yet. The list must be in
|
/* Insert LOC in *DNODE, if it's not there yet. The list must be in
|
loc_cmp order, and it is maintained as such. */
|
loc_cmp order, and it is maintained as such. */
|
|
|
static void
|
static void
|
insert_into_intersection (location_chain *nodep, rtx loc,
|
insert_into_intersection (location_chain *nodep, rtx loc,
|
enum var_init_status status)
|
enum var_init_status status)
|
{
|
{
|
location_chain node;
|
location_chain node;
|
int r;
|
int r;
|
|
|
for (node = *nodep; node; nodep = &node->next, node = *nodep)
|
for (node = *nodep; node; nodep = &node->next, node = *nodep)
|
if ((r = loc_cmp (node->loc, loc)) == 0)
|
if ((r = loc_cmp (node->loc, loc)) == 0)
|
{
|
{
|
node->init = MIN (node->init, status);
|
node->init = MIN (node->init, status);
|
return;
|
return;
|
}
|
}
|
else if (r > 0)
|
else if (r > 0)
|
break;
|
break;
|
|
|
node = (location_chain) pool_alloc (loc_chain_pool);
|
node = (location_chain) pool_alloc (loc_chain_pool);
|
|
|
node->loc = loc;
|
node->loc = loc;
|
node->set_src = NULL;
|
node->set_src = NULL;
|
node->init = status;
|
node->init = status;
|
node->next = *nodep;
|
node->next = *nodep;
|
*nodep = node;
|
*nodep = node;
|
}
|
}
|
|
|
/* Insert in DEST the intersection of the locations present in both
|
/* Insert in DEST the intersection of the locations present in both
|
S1NODE and S2VAR, directly or indirectly. S1NODE is from a
|
S1NODE and S2VAR, directly or indirectly. S1NODE is from a
|
variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
|
variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
|
DSM->dst. */
|
DSM->dst. */
|
|
|
static void
|
static void
|
intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
|
intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
|
location_chain s1node, variable s2var)
|
location_chain s1node, variable s2var)
|
{
|
{
|
dataflow_set *s1set = dsm->cur;
|
dataflow_set *s1set = dsm->cur;
|
dataflow_set *s2set = dsm->src;
|
dataflow_set *s2set = dsm->src;
|
location_chain found;
|
location_chain found;
|
|
|
if (s2var)
|
if (s2var)
|
{
|
{
|
location_chain s2node;
|
location_chain s2node;
|
|
|
gcc_checking_assert (s2var->onepart);
|
gcc_checking_assert (s2var->onepart);
|
|
|
if (s2var->n_var_parts)
|
if (s2var->n_var_parts)
|
{
|
{
|
s2node = s2var->var_part[0].loc_chain;
|
s2node = s2var->var_part[0].loc_chain;
|
|
|
for (; s1node && s2node;
|
for (; s1node && s2node;
|
s1node = s1node->next, s2node = s2node->next)
|
s1node = s1node->next, s2node = s2node->next)
|
if (s1node->loc != s2node->loc)
|
if (s1node->loc != s2node->loc)
|
break;
|
break;
|
else if (s1node->loc == val)
|
else if (s1node->loc == val)
|
continue;
|
continue;
|
else
|
else
|
insert_into_intersection (dest, s1node->loc,
|
insert_into_intersection (dest, s1node->loc,
|
MIN (s1node->init, s2node->init));
|
MIN (s1node->init, s2node->init));
|
}
|
}
|
}
|
}
|
|
|
for (; s1node; s1node = s1node->next)
|
for (; s1node; s1node = s1node->next)
|
{
|
{
|
if (s1node->loc == val)
|
if (s1node->loc == val)
|
continue;
|
continue;
|
|
|
if ((found = find_loc_in_1pdv (s1node->loc, s2var,
|
if ((found = find_loc_in_1pdv (s1node->loc, s2var,
|
shared_hash_htab (s2set->vars))))
|
shared_hash_htab (s2set->vars))))
|
{
|
{
|
insert_into_intersection (dest, s1node->loc,
|
insert_into_intersection (dest, s1node->loc,
|
MIN (s1node->init, found->init));
|
MIN (s1node->init, found->init));
|
continue;
|
continue;
|
}
|
}
|
|
|
if (GET_CODE (s1node->loc) == VALUE
|
if (GET_CODE (s1node->loc) == VALUE
|
&& !VALUE_RECURSED_INTO (s1node->loc))
|
&& !VALUE_RECURSED_INTO (s1node->loc))
|
{
|
{
|
decl_or_value dv = dv_from_value (s1node->loc);
|
decl_or_value dv = dv_from_value (s1node->loc);
|
variable svar = shared_hash_find (s1set->vars, dv);
|
variable svar = shared_hash_find (s1set->vars, dv);
|
if (svar)
|
if (svar)
|
{
|
{
|
if (svar->n_var_parts == 1)
|
if (svar->n_var_parts == 1)
|
{
|
{
|
VALUE_RECURSED_INTO (s1node->loc) = true;
|
VALUE_RECURSED_INTO (s1node->loc) = true;
|
intersect_loc_chains (val, dest, dsm,
|
intersect_loc_chains (val, dest, dsm,
|
svar->var_part[0].loc_chain,
|
svar->var_part[0].loc_chain,
|
s2var);
|
s2var);
|
VALUE_RECURSED_INTO (s1node->loc) = false;
|
VALUE_RECURSED_INTO (s1node->loc) = false;
|
}
|
}
|
}
|
}
|
}
|
}
|
|
|
/* ??? gotta look in cselib_val locations too. */
|
/* ??? gotta look in cselib_val locations too. */
|
|
|
/* ??? if the location is equivalent to any location in src,
|
/* ??? if the location is equivalent to any location in src,
|
searched recursively
|
searched recursively
|
|
|
add to dst the values needed to represent the equivalence
|
add to dst the values needed to represent the equivalence
|
|
|
telling whether locations S is equivalent to another dv's
|
telling whether locations S is equivalent to another dv's
|
location list:
|
location list:
|
|
|
for each location D in the list
|
for each location D in the list
|
|
|
if S and D satisfy rtx_equal_p, then it is present
|
if S and D satisfy rtx_equal_p, then it is present
|
|
|
else if D is a value, recurse without cycles
|
else if D is a value, recurse without cycles
|
|
|
else if S and D have the same CODE and MODE
|
else if S and D have the same CODE and MODE
|
|
|
for each operand oS and the corresponding oD
|
for each operand oS and the corresponding oD
|
|
|
if oS and oD are not equivalent, then S an D are not equivalent
|
if oS and oD are not equivalent, then S an D are not equivalent
|
|
|
else if they are RTX vectors
|
else if they are RTX vectors
|
|
|
if any vector oS element is not equivalent to its respective oD,
|
if any vector oS element is not equivalent to its respective oD,
|
then S and D are not equivalent
|
then S and D are not equivalent
|
|
|
*/
|
*/
|
|
|
|
|
}
|
}
|
}
|
}
|
|
|
/* Return -1 if X should be before Y in a location list for a 1-part
|
/* Return -1 if X should be before Y in a location list for a 1-part
|
variable, 1 if Y should be before X, and 0 if they're equivalent
|
variable, 1 if Y should be before X, and 0 if they're equivalent
|
and should not appear in the list. */
|
and should not appear in the list. */
|
|
|
static int
|
static int
|
loc_cmp (rtx x, rtx y)
|
loc_cmp (rtx x, rtx y)
|
{
|
{
|
int i, j, r;
|
int i, j, r;
|
RTX_CODE code = GET_CODE (x);
|
RTX_CODE code = GET_CODE (x);
|
const char *fmt;
|
const char *fmt;
|
|
|
if (x == y)
|
if (x == y)
|
return 0;
|
return 0;
|
|
|
if (REG_P (x))
|
if (REG_P (x))
|
{
|
{
|
if (!REG_P (y))
|
if (!REG_P (y))
|
return -1;
|
return -1;
|
gcc_assert (GET_MODE (x) == GET_MODE (y));
|
gcc_assert (GET_MODE (x) == GET_MODE (y));
|
if (REGNO (x) == REGNO (y))
|
if (REGNO (x) == REGNO (y))
|
return 0;
|
return 0;
|
else if (REGNO (x) < REGNO (y))
|
else if (REGNO (x) < REGNO (y))
|
return -1;
|
return -1;
|
else
|
else
|
return 1;
|
return 1;
|
}
|
}
|
|
|
if (REG_P (y))
|
if (REG_P (y))
|
return 1;
|
return 1;
|
|
|
if (MEM_P (x))
|
if (MEM_P (x))
|
{
|
{
|
if (!MEM_P (y))
|
if (!MEM_P (y))
|
return -1;
|
return -1;
|
gcc_assert (GET_MODE (x) == GET_MODE (y));
|
gcc_assert (GET_MODE (x) == GET_MODE (y));
|
return loc_cmp (XEXP (x, 0), XEXP (y, 0));
|
return loc_cmp (XEXP (x, 0), XEXP (y, 0));
|
}
|
}
|
|
|
if (MEM_P (y))
|
if (MEM_P (y))
|
return 1;
|
return 1;
|
|
|
if (GET_CODE (x) == VALUE)
|
if (GET_CODE (x) == VALUE)
|
{
|
{
|
if (GET_CODE (y) != VALUE)
|
if (GET_CODE (y) != VALUE)
|
return -1;
|
return -1;
|
/* Don't assert the modes are the same, that is true only
|
/* Don't assert the modes are the same, that is true only
|
when not recursing. (subreg:QI (value:SI 1:1) 0)
|
when not recursing. (subreg:QI (value:SI 1:1) 0)
|
and (subreg:QI (value:DI 2:2) 0) can be compared,
|
and (subreg:QI (value:DI 2:2) 0) can be compared,
|
even when the modes are different. */
|
even when the modes are different. */
|
if (canon_value_cmp (x, y))
|
if (canon_value_cmp (x, y))
|
return -1;
|
return -1;
|
else
|
else
|
return 1;
|
return 1;
|
}
|
}
|
|
|
if (GET_CODE (y) == VALUE)
|
if (GET_CODE (y) == VALUE)
|
return 1;
|
return 1;
|
|
|
/* Entry value is the least preferable kind of expression. */
|
/* Entry value is the least preferable kind of expression. */
|
if (GET_CODE (x) == ENTRY_VALUE)
|
if (GET_CODE (x) == ENTRY_VALUE)
|
{
|
{
|
if (GET_CODE (y) != ENTRY_VALUE)
|
if (GET_CODE (y) != ENTRY_VALUE)
|
return 1;
|
return 1;
|
gcc_assert (GET_MODE (x) == GET_MODE (y));
|
gcc_assert (GET_MODE (x) == GET_MODE (y));
|
return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
|
return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
|
}
|
}
|
|
|
if (GET_CODE (y) == ENTRY_VALUE)
|
if (GET_CODE (y) == ENTRY_VALUE)
|
return -1;
|
return -1;
|
|
|
if (GET_CODE (x) == GET_CODE (y))
|
if (GET_CODE (x) == GET_CODE (y))
|
/* Compare operands below. */;
|
/* Compare operands below. */;
|
else if (GET_CODE (x) < GET_CODE (y))
|
else if (GET_CODE (x) < GET_CODE (y))
|
return -1;
|
return -1;
|
else
|
else
|
return 1;
|
return 1;
|
|
|
gcc_assert (GET_MODE (x) == GET_MODE (y));
|
gcc_assert (GET_MODE (x) == GET_MODE (y));
|
|
|
if (GET_CODE (x) == DEBUG_EXPR)
|
if (GET_CODE (x) == DEBUG_EXPR)
|
{
|
{
|
if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
|
if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
|
< DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
|
< DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
|
return -1;
|
return -1;
|
gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
|
gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
|
> DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
|
> DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
|
return 1;
|
return 1;
|
}
|
}
|
|
|
fmt = GET_RTX_FORMAT (code);
|
fmt = GET_RTX_FORMAT (code);
|
for (i = 0; i < GET_RTX_LENGTH (code); i++)
|
for (i = 0; i < GET_RTX_LENGTH (code); i++)
|
switch (fmt[i])
|
switch (fmt[i])
|
{
|
{
|
case 'w':
|
case 'w':
|
if (XWINT (x, i) == XWINT (y, i))
|
if (XWINT (x, i) == XWINT (y, i))
|
break;
|
break;
|
else if (XWINT (x, i) < XWINT (y, i))
|
else if (XWINT (x, i) < XWINT (y, i))
|
return -1;
|
return -1;
|
else
|
else
|
return 1;
|
return 1;
|
|
|
case 'n':
|
case 'n':
|
case 'i':
|
case 'i':
|
if (XINT (x, i) == XINT (y, i))
|
if (XINT (x, i) == XINT (y, i))
|
break;
|
break;
|
else if (XINT (x, i) < XINT (y, i))
|
else if (XINT (x, i) < XINT (y, i))
|
return -1;
|
return -1;
|
else
|
else
|
return 1;
|
return 1;
|
|
|
case 'V':
|
case 'V':
|
case 'E':
|
case 'E':
|
/* Compare the vector length first. */
|
/* Compare the vector length first. */
|
if (XVECLEN (x, i) == XVECLEN (y, i))
|
if (XVECLEN (x, i) == XVECLEN (y, i))
|
/* Compare the vectors elements. */;
|
/* Compare the vectors elements. */;
|
else if (XVECLEN (x, i) < XVECLEN (y, i))
|
else if (XVECLEN (x, i) < XVECLEN (y, i))
|
return -1;
|
return -1;
|
else
|
else
|
return 1;
|
return 1;
|
|
|
for (j = 0; j < XVECLEN (x, i); j++)
|
for (j = 0; j < XVECLEN (x, i); j++)
|
if ((r = loc_cmp (XVECEXP (x, i, j),
|
if ((r = loc_cmp (XVECEXP (x, i, j),
|
XVECEXP (y, i, j))))
|
XVECEXP (y, i, j))))
|
return r;
|
return r;
|
break;
|
break;
|
|
|
case 'e':
|
case 'e':
|
if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
|
if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
|
return r;
|
return r;
|
break;
|
break;
|
|
|
case 'S':
|
case 'S':
|
case 's':
|
case 's':
|
if (XSTR (x, i) == XSTR (y, i))
|
if (XSTR (x, i) == XSTR (y, i))
|
break;
|
break;
|
if (!XSTR (x, i))
|
if (!XSTR (x, i))
|
return -1;
|
return -1;
|
if (!XSTR (y, i))
|
if (!XSTR (y, i))
|
return 1;
|
return 1;
|
if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
|
if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
|
break;
|
break;
|
else if (r < 0)
|
else if (r < 0)
|
return -1;
|
return -1;
|
else
|
else
|
return 1;
|
return 1;
|
|
|
case 'u':
|
case 'u':
|
/* These are just backpointers, so they don't matter. */
|
/* These are just backpointers, so they don't matter. */
|
break;
|
break;
|
|
|
case '0':
|
case '0':
|
case 't':
|
case 't':
|
break;
|
break;
|
|
|
/* It is believed that rtx's at this level will never
|
/* It is believed that rtx's at this level will never
|
contain anything but integers and other rtx's,
|
contain anything but integers and other rtx's,
|
except for within LABEL_REFs and SYMBOL_REFs. */
|
except for within LABEL_REFs and SYMBOL_REFs. */
|
default:
|
default:
|
gcc_unreachable ();
|
gcc_unreachable ();
|
}
|
}
|
|
|
return 0;
|
return 0;
|
}
|
}
|
|
|
#if ENABLE_CHECKING
/* Check the order of entries in one-part variables.  Hashtable
   traversal callback: always returns 1 to keep traversing; all
   checks are via gcc_assert.  */

static int
canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
{
  variable var = (variable) *slot;
  location_chain node, next;

#ifdef ENABLE_RTL_CHECKING
  int i;
  /* NOTE(review): the loop index I is not used in the assertion body;
     it looks like this was meant to check var_part[i].cur_loc — left
     as-is to preserve behavior; confirm against upstream.  */
  for (i = 0; i < var->n_var_parts; i++)
    gcc_assert (var->var_part[0].cur_loc == NULL);
  gcc_assert (!var->in_changed_variables);
#endif

  if (!var->onepart)
    return 1;

  gcc_assert (var->n_var_parts == 1);
  node = var->var_part[0].loc_chain;
  gcc_assert (node);

  /* The chain must be strictly increasing in loc_cmp order.  */
  while ((next = node->next))
    {
      gcc_assert (loc_cmp (node->loc, next->loc) < 0);
      node = next;
    }

  return 1;
}
#endif
|
|
|
/* Mark with VALUE_RECURSED_INTO values that have neighbors that are
|
/* Mark with VALUE_RECURSED_INTO values that have neighbors that are
|
more likely to be chosen as canonical for an equivalence set.
|
more likely to be chosen as canonical for an equivalence set.
|
Ensure less likely values can reach more likely neighbors, making
|
Ensure less likely values can reach more likely neighbors, making
|
the connections bidirectional. */
|
the connections bidirectional. */
|
|
|
static int
|
static int
|
canonicalize_values_mark (void **slot, void *data)
|
canonicalize_values_mark (void **slot, void *data)
|
{
|
{
|
dataflow_set *set = (dataflow_set *)data;
|
dataflow_set *set = (dataflow_set *)data;
|
variable var = (variable) *slot;
|
variable var = (variable) *slot;
|
decl_or_value dv = var->dv;
|
decl_or_value dv = var->dv;
|
rtx val;
|
rtx val;
|
location_chain node;
|
location_chain node;
|
|
|
if (!dv_is_value_p (dv))
|
if (!dv_is_value_p (dv))
|
return 1;
|
return 1;
|
|
|
gcc_checking_assert (var->n_var_parts == 1);
|
gcc_checking_assert (var->n_var_parts == 1);
|
|
|
val = dv_as_value (dv);
|
val = dv_as_value (dv);
|
|
|
for (node = var->var_part[0].loc_chain; node; node = node->next)
|
for (node = var->var_part[0].loc_chain; node; node = node->next)
|
if (GET_CODE (node->loc) == VALUE)
|
if (GET_CODE (node->loc) == VALUE)
|
{
|
{
|
if (canon_value_cmp (node->loc, val))
|
if (canon_value_cmp (node->loc, val))
|
VALUE_RECURSED_INTO (val) = true;
|
VALUE_RECURSED_INTO (val) = true;
|
else
|
else
|
{
|
{
|
decl_or_value odv = dv_from_value (node->loc);
|
decl_or_value odv = dv_from_value (node->loc);
|
void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
|
void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
|
|
|
set_slot_part (set, val, oslot, odv, 0,
|
set_slot_part (set, val, oslot, odv, 0,
|
node->init, NULL_RTX);
|
node->init, NULL_RTX);
|
|
|
VALUE_RECURSED_INTO (node->loc) = true;
|
VALUE_RECURSED_INTO (node->loc) = true;
|
}
|
}
|
}
|
}
|
|
|
return 1;
|
return 1;
|
}
|
}
|
|
|
/* Remove redundant entries from equivalence lists in onepart
|
/* Remove redundant entries from equivalence lists in onepart
|
variables, canonicalizing equivalence sets into star shapes. */
|
variables, canonicalizing equivalence sets into star shapes. */
|
|
|
static int
|
static int
|
canonicalize_values_star (void **slot, void *data)
|
canonicalize_values_star (void **slot, void *data)
|
{
|
{
|
dataflow_set *set = (dataflow_set *)data;
|
dataflow_set *set = (dataflow_set *)data;
|
variable var = (variable) *slot;
|
variable var = (variable) *slot;
|
decl_or_value dv = var->dv;
|
decl_or_value dv = var->dv;
|
location_chain node;
|
location_chain node;
|
decl_or_value cdv;
|
decl_or_value cdv;
|
rtx val, cval;
|
rtx val, cval;
|
void **cslot;
|
void **cslot;
|
bool has_value;
|
bool has_value;
|
bool has_marks;
|
bool has_marks;
|
|
|
if (!var->onepart)
|
if (!var->onepart)
|
return 1;
|
return 1;
|
|
|
gcc_checking_assert (var->n_var_parts == 1);
|
gcc_checking_assert (var->n_var_parts == 1);
|
|
|
if (dv_is_value_p (dv))
|
if (dv_is_value_p (dv))
|
{
|
{
|
cval = dv_as_value (dv);
|
cval = dv_as_value (dv);
|
if (!VALUE_RECURSED_INTO (cval))
|
if (!VALUE_RECURSED_INTO (cval))
|
return 1;
|
return 1;
|
VALUE_RECURSED_INTO (cval) = false;
|
VALUE_RECURSED_INTO (cval) = false;
|
}
|
}
|
else
|
else
|
cval = NULL_RTX;
|
cval = NULL_RTX;
|
|
|
restart:
|
restart:
|
val = cval;
|
val = cval;
|
has_value = false;
|
has_value = false;
|
has_marks = false;
|
has_marks = false;
|
|
|
gcc_assert (var->n_var_parts == 1);
|
gcc_assert (var->n_var_parts == 1);
|
|
|
for (node = var->var_part[0].loc_chain; node; node = node->next)
|
for (node = var->var_part[0].loc_chain; node; node = node->next)
|
if (GET_CODE (node->loc) == VALUE)
|
if (GET_CODE (node->loc) == VALUE)
|
{
|
{
|
has_value = true;
|
has_value = true;
|
if (VALUE_RECURSED_INTO (node->loc))
|
if (VALUE_RECURSED_INTO (node->loc))
|
has_marks = true;
|
has_marks = true;
|
if (canon_value_cmp (node->loc, cval))
|
if (canon_value_cmp (node->loc, cval))
|
cval = node->loc;
|
cval = node->loc;
|
}
|
}
|
|
|
if (!has_value)
|
if (!has_value)
|
return 1;
|
return 1;
|
|
|
if (cval == val)
|
if (cval == val)
|
{
|
{
|
if (!has_marks || dv_is_decl_p (dv))
|
if (!has_marks || dv_is_decl_p (dv))
|
return 1;
|
return 1;
|
|
|
/* Keep it marked so that we revisit it, either after visiting a
|
/* Keep it marked so that we revisit it, either after visiting a
|
child node, or after visiting a new parent that might be
|
child node, or after visiting a new parent that might be
|
found out. */
|
found out. */
|
VALUE_RECURSED_INTO (val) = true;
|
VALUE_RECURSED_INTO (val) = true;
|
|
|
for (node = var->var_part[0].loc_chain; node; node = node->next)
|
for (node = var->var_part[0].loc_chain; node; node = node->next)
|
if (GET_CODE (node->loc) == VALUE
|
if (GET_CODE (node->loc) == VALUE
|
&& VALUE_RECURSED_INTO (node->loc))
|
&& VALUE_RECURSED_INTO (node->loc))
|
{
|
{
|
cval = node->loc;
|
cval = node->loc;
|
restart_with_cval:
|
restart_with_cval:
|
VALUE_RECURSED_INTO (cval) = false;
|
VALUE_RECURSED_INTO (cval) = false;
|
dv = dv_from_value (cval);
|
dv = dv_from_value (cval);
|
slot = shared_hash_find_slot_noinsert (set->vars, dv);
|
slot = shared_hash_find_slot_noinsert (set->vars, dv);
|
if (!slot)
|
if (!slot)
|
{
|
{
|
gcc_assert (dv_is_decl_p (var->dv));
|
gcc_assert (dv_is_decl_p (var->dv));
|
/* The canonical value was reset and dropped.
|
/* The canonical value was reset and dropped.
|
Remove it. */
|
Remove it. */
|
clobber_variable_part (set, NULL, var->dv, 0, NULL);
|
clobber_variable_part (set, NULL, var->dv, 0, NULL);
|
return 1;
|
return 1;
|
}
|
}
|
var = (variable)*slot;
|
var = (variable)*slot;
|
gcc_assert (dv_is_value_p (var->dv));
|
gcc_assert (dv_is_value_p (var->dv));
|
if (var->n_var_parts == 0)
|
if (var->n_var_parts == 0)
|
return 1;
|
return 1;
|
gcc_assert (var->n_var_parts == 1);
|
gcc_assert (var->n_var_parts == 1);
|
goto restart;
|
goto restart;
|
}
|
}
|
|
|
VALUE_RECURSED_INTO (val) = false;
|
VALUE_RECURSED_INTO (val) = false;
|
|
|
return 1;
|
return 1;
|
}
|
}
|
|
|
/* Push values to the canonical one. */
|
/* Push values to the canonical one. */
|
cdv = dv_from_value (cval);
|
cdv = dv_from_value (cval);
|
cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
|
cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
|
|
|
for (node = var->var_part[0].loc_chain; node; node = node->next)
|
for (node = var->var_part[0].loc_chain; node; node = node->next)
|
if (node->loc != cval)
|
if (node->loc != cval)
|
{
|
{
|
cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
|
cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
|
node->init, NULL_RTX);
|
node->init, NULL_RTX);
|
if (GET_CODE (node->loc) == VALUE)
|
if (GET_CODE (node->loc) == VALUE)
|
{
|
{
|
decl_or_value ndv = dv_from_value (node->loc);
|
decl_or_value ndv = dv_from_value (node->loc);
|
|
|
set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
|
set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
|
NO_INSERT);
|
NO_INSERT);
|
|
|
if (canon_value_cmp (node->loc, val))
|
if (canon_value_cmp (node->loc, val))
|
{
|
{
|
/* If it could have been a local minimum, it's not any more,
|
/* If it could have been a local minimum, it's not any more,
|
since it's now neighbor to cval, so it may have to push
|
since it's now neighbor to cval, so it may have to push
|
to it. Conversely, if it wouldn't have prevailed over
|
to it. Conversely, if it wouldn't have prevailed over
|
val, then whatever mark it has is fine: if it was to
|
val, then whatever mark it has is fine: if it was to
|
push, it will now push to a more canonical node, but if
|
push, it will now push to a more canonical node, but if
|
it wasn't, then it has already pushed any values it might
|
it wasn't, then it has already pushed any values it might
|
have to. */
|
have to. */
|
VALUE_RECURSED_INTO (node->loc) = true;
|
VALUE_RECURSED_INTO (node->loc) = true;
|
/* Make sure we visit node->loc by ensuring we cval is
|
/* Make sure we visit node->loc by ensuring we cval is
|
visited too. */
|
visited too. */
|
VALUE_RECURSED_INTO (cval) = true;
|
VALUE_RECURSED_INTO (cval) = true;
|
}
|
}
|
else if (!VALUE_RECURSED_INTO (node->loc))
|
else if (!VALUE_RECURSED_INTO (node->loc))
|
/* If we have no need to "recurse" into this node, it's
|
/* If we have no need to "recurse" into this node, it's
|
already "canonicalized", so drop the link to the old
|
already "canonicalized", so drop the link to the old
|
parent. */
|
parent. */
|
clobber_variable_part (set, cval, ndv, 0, NULL);
|
clobber_variable_part (set, cval, ndv, 0, NULL);
|
}
|
}
|
else if (GET_CODE (node->loc) == REG)
|
else if (GET_CODE (node->loc) == REG)
|
{
|
{
|
attrs list = set->regs[REGNO (node->loc)], *listp;
|
attrs list = set->regs[REGNO (node->loc)], *listp;
|
|
|
/* Change an existing attribute referring to dv so that it
|
/* Change an existing attribute referring to dv so that it
|
refers to cdv, removing any duplicate this might
|
refers to cdv, removing any duplicate this might
|
introduce, and checking that no previous duplicates
|
introduce, and checking that no previous duplicates
|
existed, all in a single pass. */
|
existed, all in a single pass. */
|
|
|
while (list)
|
while (list)
|
{
|
{
|
if (list->offset == 0
|
if (list->offset == 0
|
&& (dv_as_opaque (list->dv) == dv_as_opaque (dv)
|
&& (dv_as_opaque (list->dv) == dv_as_opaque (dv)
|
|| dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
|
|| dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
|
break;
|
break;
|
|
|
list = list->next;
|
list = list->next;
|
}
|
}
|
|
|
gcc_assert (list);
|
gcc_assert (list);
|
if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
|
if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
|
{
|
{
|
list->dv = cdv;
|
list->dv = cdv;
|
for (listp = &list->next; (list = *listp); listp = &list->next)
|
for (listp = &list->next; (list = *listp); listp = &list->next)
|
{
|
{
|
if (list->offset)
|
if (list->offset)
|
continue;
|
continue;
|
|
|
if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
|
if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
|
{
|
{
|
*listp = list->next;
|
*listp = list->next;
|
pool_free (attrs_pool, list);
|
pool_free (attrs_pool, list);
|
list = *listp;
|
list = *listp;
|
break;
|
break;
|
}
|
}
|
|
|
gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
|
gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
|
}
|
}
|
}
|
}
|
else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
|
else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
|
{
|
{
|
for (listp = &list->next; (list = *listp); listp = &list->next)
|
for (listp = &list->next; (list = *listp); listp = &list->next)
|
{
|
{
|
if (list->offset)
|
if (list->offset)
|
continue;
|
continue;
|
|
|
if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
|
if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
|
{
|
{
|
*listp = list->next;
|
*listp = list->next;
|
pool_free (attrs_pool, list);
|
pool_free (attrs_pool, list);
|
list = *listp;
|
list = *listp;
|
break;
|
break;
|
}
|
}
|
|
|
gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
|
gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
|
}
|
}
|
}
|
}
|
else
|
else
|
gcc_unreachable ();
|
gcc_unreachable ();
|
|
|
#if ENABLE_CHECKING
|
#if ENABLE_CHECKING
|
while (list)
|
while (list)
|
{
|
{
|
if (list->offset == 0
|
if (list->offset == 0
|
&& (dv_as_opaque (list->dv) == dv_as_opaque (dv)
|
&& (dv_as_opaque (list->dv) == dv_as_opaque (dv)
|
|| dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
|
|| dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
|
gcc_unreachable ();
|
gcc_unreachable ();
|
|
|
list = list->next;
|
list = list->next;
|
}
|
}
|
#endif
|
#endif
|
}
|
}
|
}
|
}
|
|
|
if (val)
|
if (val)
|
set_slot_part (set, val, cslot, cdv, 0,
|
set_slot_part (set, val, cslot, cdv, 0,
|
VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
|
VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
|
|
|
slot = clobber_slot_part (set, cval, slot, 0, NULL);
|
slot = clobber_slot_part (set, cval, slot, 0, NULL);
|
|
|
/* Variable may have been unshared. */
|
/* Variable may have been unshared. */
|
var = (variable)*slot;
|
var = (variable)*slot;
|
gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
|
gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
|
&& var->var_part[0].loc_chain->next == NULL);
|
&& var->var_part[0].loc_chain->next == NULL);
|
|
|
if (VALUE_RECURSED_INTO (cval))
|
if (VALUE_RECURSED_INTO (cval))
|
goto restart_with_cval;
|
goto restart_with_cval;
|
|
|
return 1;
|
return 1;
|
}
|
}
|
|
|
/* Bind one-part variables to the canonical value in an equivalence
|
/* Bind one-part variables to the canonical value in an equivalence
|
set. Not doing this causes dataflow convergence failure in rare
|
set. Not doing this causes dataflow convergence failure in rare
|
circumstances, see PR42873. Unfortunately we can't do this
|
circumstances, see PR42873. Unfortunately we can't do this
|
efficiently as part of canonicalize_values_star, since we may not
|
efficiently as part of canonicalize_values_star, since we may not
|
have determined or even seen the canonical value of a set when we
|
have determined or even seen the canonical value of a set when we
|
get to a variable that references another member of the set. */
|
get to a variable that references another member of the set. */
|
|
|
static int
|
static int
|
canonicalize_vars_star (void **slot, void *data)
|
canonicalize_vars_star (void **slot, void *data)
|
{
|
{
|
dataflow_set *set = (dataflow_set *)data;
|
dataflow_set *set = (dataflow_set *)data;
|
variable var = (variable) *slot;
|
variable var = (variable) *slot;
|
decl_or_value dv = var->dv;
|
decl_or_value dv = var->dv;
|
location_chain node;
|
location_chain node;
|
rtx cval;
|
rtx cval;
|
decl_or_value cdv;
|
decl_or_value cdv;
|
void **cslot;
|
void **cslot;
|
variable cvar;
|
variable cvar;
|
location_chain cnode;
|
location_chain cnode;
|
|
|
if (!var->onepart || var->onepart == ONEPART_VALUE)
|
if (!var->onepart || var->onepart == ONEPART_VALUE)
|
return 1;
|
return 1;
|
|
|
gcc_assert (var->n_var_parts == 1);
|
gcc_assert (var->n_var_parts == 1);
|
|
|
node = var->var_part[0].loc_chain;
|
node = var->var_part[0].loc_chain;
|
|
|
if (GET_CODE (node->loc) != VALUE)
|
if (GET_CODE (node->loc) != VALUE)
|
return 1;
|
return 1;
|
|
|
gcc_assert (!node->next);
|
gcc_assert (!node->next);
|
cval = node->loc;
|
cval = node->loc;
|
|
|
/* Push values to the canonical one. */
|
/* Push values to the canonical one. */
|
cdv = dv_from_value (cval);
|
cdv = dv_from_value (cval);
|
cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
|
cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
|
if (!cslot)
|
if (!cslot)
|
return 1;
|
return 1;
|
cvar = (variable)*cslot;
|
cvar = (variable)*cslot;
|
gcc_assert (cvar->n_var_parts == 1);
|
gcc_assert (cvar->n_var_parts == 1);
|
|
|
cnode = cvar->var_part[0].loc_chain;
|
cnode = cvar->var_part[0].loc_chain;
|
|
|
/* CVAL is canonical if its value list contains non-VALUEs or VALUEs
|
/* CVAL is canonical if its value list contains non-VALUEs or VALUEs
|
that are not “more canonical” than it. */
|
that are not “more canonical” than it. */
|
if (GET_CODE (cnode->loc) != VALUE
|
if (GET_CODE (cnode->loc) != VALUE
|
|| !canon_value_cmp (cnode->loc, cval))
|
|| !canon_value_cmp (cnode->loc, cval))
|
return 1;
|
return 1;
|
|
|
/* CVAL was found to be non-canonical. Change the variable to point
|
/* CVAL was found to be non-canonical. Change the variable to point
|
to the canonical VALUE. */
|
to the canonical VALUE. */
|
gcc_assert (!cnode->next);
|
gcc_assert (!cnode->next);
|
cval = cnode->loc;
|
cval = cnode->loc;
|
|
|
slot = set_slot_part (set, cval, slot, dv, 0,
|
slot = set_slot_part (set, cval, slot, dv, 0,
|
node->init, node->set_src);
|
node->init, node->set_src);
|
clobber_slot_part (set, cval, slot, 0, node->set_src);
|
clobber_slot_part (set, cval, slot, 0, node->set_src);
|
|
|
return 1;
|
return 1;
|
}
|
}
|
|
|
/* Combine variable or value in *S1SLOT (in DSM->cur) with the
|
/* Combine variable or value in *S1SLOT (in DSM->cur) with the
|
corresponding entry in DSM->src. Multi-part variables are combined
|
corresponding entry in DSM->src. Multi-part variables are combined
|
with variable_union, whereas onepart dvs are combined with
|
with variable_union, whereas onepart dvs are combined with
|
intersection. */
|
intersection. */
|
|
|
static int
|
static int
|
variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
|
variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
|
{
|
{
|
dataflow_set *dst = dsm->dst;
|
dataflow_set *dst = dsm->dst;
|
void **dstslot;
|
void **dstslot;
|
variable s2var, dvar = NULL;
|
variable s2var, dvar = NULL;
|
decl_or_value dv = s1var->dv;
|
decl_or_value dv = s1var->dv;
|
onepart_enum_t onepart = s1var->onepart;
|
onepart_enum_t onepart = s1var->onepart;
|
rtx val;
|
rtx val;
|
hashval_t dvhash;
|
hashval_t dvhash;
|
location_chain node, *nodep;
|
location_chain node, *nodep;
|
|
|
/* If the incoming onepart variable has an empty location list, then
|
/* If the incoming onepart variable has an empty location list, then
|
the intersection will be just as empty. For other variables,
|
the intersection will be just as empty. For other variables,
|
it's always union. */
|
it's always union. */
|
gcc_checking_assert (s1var->n_var_parts
|
gcc_checking_assert (s1var->n_var_parts
|
&& s1var->var_part[0].loc_chain);
|
&& s1var->var_part[0].loc_chain);
|
|
|
if (!onepart)
|
if (!onepart)
|
return variable_union (s1var, dst);
|
return variable_union (s1var, dst);
|
|
|
gcc_checking_assert (s1var->n_var_parts == 1);
|
gcc_checking_assert (s1var->n_var_parts == 1);
|
|
|
dvhash = dv_htab_hash (dv);
|
dvhash = dv_htab_hash (dv);
|
if (dv_is_value_p (dv))
|
if (dv_is_value_p (dv))
|
val = dv_as_value (dv);
|
val = dv_as_value (dv);
|
else
|
else
|
val = NULL;
|
val = NULL;
|
|
|
s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
|
s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
|
if (!s2var)
|
if (!s2var)
|
{
|
{
|
dst_can_be_shared = false;
|
dst_can_be_shared = false;
|
return 1;
|
return 1;
|
}
|
}
|
|
|
dsm->src_onepart_cnt--;
|
dsm->src_onepart_cnt--;
|
gcc_assert (s2var->var_part[0].loc_chain
|
gcc_assert (s2var->var_part[0].loc_chain
|
&& s2var->onepart == onepart
|
&& s2var->onepart == onepart
|
&& s2var->n_var_parts == 1);
|
&& s2var->n_var_parts == 1);
|
|
|
dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
|
dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
|
if (dstslot)
|
if (dstslot)
|
{
|
{
|
dvar = (variable)*dstslot;
|
dvar = (variable)*dstslot;
|
gcc_assert (dvar->refcount == 1
|
gcc_assert (dvar->refcount == 1
|
&& dvar->onepart == onepart
|
&& dvar->onepart == onepart
|
&& dvar->n_var_parts == 1);
|
&& dvar->n_var_parts == 1);
|
nodep = &dvar->var_part[0].loc_chain;
|
nodep = &dvar->var_part[0].loc_chain;
|
}
|
}
|
else
|
else
|
{
|
{
|
nodep = &node;
|
nodep = &node;
|
node = NULL;
|
node = NULL;
|
}
|
}
|
|
|
if (!dstslot && !onepart_variable_different_p (s1var, s2var))
|
if (!dstslot && !onepart_variable_different_p (s1var, s2var))
|
{
|
{
|
dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
|
dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
|
dvhash, INSERT);
|
dvhash, INSERT);
|
*dstslot = dvar = s2var;
|
*dstslot = dvar = s2var;
|
dvar->refcount++;
|
dvar->refcount++;
|
}
|
}
|
else
|
else
|
{
|
{
|
dst_can_be_shared = false;
|
dst_can_be_shared = false;
|
|
|
intersect_loc_chains (val, nodep, dsm,
|
intersect_loc_chains (val, nodep, dsm,
|
s1var->var_part[0].loc_chain, s2var);
|
s1var->var_part[0].loc_chain, s2var);
|
|
|
if (!dstslot)
|
if (!dstslot)
|
{
|
{
|
if (node)
|
if (node)
|
{
|
{
|
dvar = (variable) pool_alloc (onepart_pool (onepart));
|
dvar = (variable) pool_alloc (onepart_pool (onepart));
|
dvar->dv = dv;
|
dvar->dv = dv;
|
dvar->refcount = 1;
|
dvar->refcount = 1;
|
dvar->n_var_parts = 1;
|
dvar->n_var_parts = 1;
|
dvar->onepart = onepart;
|
dvar->onepart = onepart;
|
dvar->in_changed_variables = false;
|
dvar->in_changed_variables = false;
|
dvar->var_part[0].loc_chain = node;
|
dvar->var_part[0].loc_chain = node;
|
dvar->var_part[0].cur_loc = NULL;
|
dvar->var_part[0].cur_loc = NULL;
|
if (onepart)
|
if (onepart)
|
VAR_LOC_1PAUX (dvar) = NULL;
|
VAR_LOC_1PAUX (dvar) = NULL;
|
else
|
else
|
VAR_PART_OFFSET (dvar, 0) = 0;
|
VAR_PART_OFFSET (dvar, 0) = 0;
|
|
|
dstslot
|
dstslot
|
= shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
|
= shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
|
INSERT);
|
INSERT);
|
gcc_assert (!*dstslot);
|
gcc_assert (!*dstslot);
|
*dstslot = dvar;
|
*dstslot = dvar;
|
}
|
}
|
else
|
else
|
return 1;
|
return 1;
|
}
|
}
|
}
|
}
|
|
|
nodep = &dvar->var_part[0].loc_chain;
|
nodep = &dvar->var_part[0].loc_chain;
|
while ((node = *nodep))
|
while ((node = *nodep))
|
{
|
{
|
location_chain *nextp = &node->next;
|
location_chain *nextp = &node->next;
|
|
|
if (GET_CODE (node->loc) == REG)
|
if (GET_CODE (node->loc) == REG)
|
{
|
{
|
attrs list;
|
attrs list;
|
|
|
for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
|
for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
|
if (GET_MODE (node->loc) == GET_MODE (list->loc)
|
if (GET_MODE (node->loc) == GET_MODE (list->loc)
|
&& dv_is_value_p (list->dv))
|
&& dv_is_value_p (list->dv))
|
break;
|
break;
|
|
|
if (!list)
|
if (!list)
|
attrs_list_insert (&dst->regs[REGNO (node->loc)],
|
attrs_list_insert (&dst->regs[REGNO (node->loc)],
|
dv, 0, node->loc);
|
dv, 0, node->loc);
|
/* If this value became canonical for another value that had
|
/* If this value became canonical for another value that had
|
this register, we want to leave it alone. */
|
this register, we want to leave it alone. */
|
else if (dv_as_value (list->dv) != val)
|
else if (dv_as_value (list->dv) != val)
|
{
|
{
|
dstslot = set_slot_part (dst, dv_as_value (list->dv),
|
dstslot = set_slot_part (dst, dv_as_value (list->dv),
|
dstslot, dv, 0,
|
dstslot, dv, 0,
|
node->init, NULL_RTX);
|
node->init, NULL_RTX);
|
dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
|
dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
|
|
|
/* Since nextp points into the removed node, we can't
|
/* Since nextp points into the removed node, we can't
|
use it. The pointer to the next node moved to nodep.
|
use it. The pointer to the next node moved to nodep.
|
However, if the variable we're walking is unshared
|
However, if the variable we're walking is unshared
|
during our walk, we'll keep walking the location list
|
during our walk, we'll keep walking the location list
|
of the previously-shared variable, in which case the
|
of the previously-shared variable, in which case the
|
node won't have been removed, and we'll want to skip
|
node won't have been removed, and we'll want to skip
|
it. That's why we test *nodep here. */
|
it. That's why we test *nodep here. */
|
if (*nodep != node)
|
if (*nodep != node)
|
nextp = nodep;
|
nextp = nodep;
|
}
|
}
|
}
|
}
|
else
|
else
|
/* Canonicalization puts registers first, so we don't have to
|
/* Canonicalization puts registers first, so we don't have to
|
walk it all. */
|
walk it all. */
|
break;
|
break;
|
nodep = nextp;
|
nodep = nextp;
|
}
|
}
|
|
|
if (dvar != (variable)*dstslot)
|
if (dvar != (variable)*dstslot)
|
dvar = (variable)*dstslot;
|
dvar = (variable)*dstslot;
|
nodep = &dvar->var_part[0].loc_chain;
|
nodep = &dvar->var_part[0].loc_chain;
|
|
|
if (val)
|
if (val)
|
{
|
{
|
/* Mark all referenced nodes for canonicalization, and make sure
|
/* Mark all referenced nodes for canonicalization, and make sure
|
we have mutual equivalence links. */
|
we have mutual equivalence links. */
|
VALUE_RECURSED_INTO (val) = true;
|
VALUE_RECURSED_INTO (val) = true;
|
for (node = *nodep; node; node = node->next)
|
for (node = *nodep; node; node = node->next)
|
if (GET_CODE (node->loc) == VALUE)
|
if (GET_CODE (node->loc) == VALUE)
|
{
|
{
|
VALUE_RECURSED_INTO (node->loc) = true;
|
VALUE_RECURSED_INTO (node->loc) = true;
|
set_variable_part (dst, val, dv_from_value (node->loc), 0,
|
set_variable_part (dst, val, dv_from_value (node->loc), 0,
|
node->init, NULL, INSERT);
|
node->init, NULL, INSERT);
|
}
|
}
|
|
|
dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
|
dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
|
gcc_assert (*dstslot == dvar);
|
gcc_assert (*dstslot == dvar);
|
canonicalize_values_star (dstslot, dst);
|
canonicalize_values_star (dstslot, dst);
|
gcc_checking_assert (dstslot
|
gcc_checking_assert (dstslot
|
== shared_hash_find_slot_noinsert_1 (dst->vars,
|
== shared_hash_find_slot_noinsert_1 (dst->vars,
|
dv, dvhash));
|
dv, dvhash));
|
dvar = (variable)*dstslot;
|
dvar = (variable)*dstslot;
|
}
|
}
|
else
|
else
|
{
|
{
|
bool has_value = false, has_other = false;
|
bool has_value = false, has_other = false;
|
|
|
/* If we have one value and anything else, we're going to
|
/* If we have one value and anything else, we're going to
|
canonicalize this, so make sure all values have an entry in
|
canonicalize this, so make sure all values have an entry in
|
the table and are marked for canonicalization. */
|
the table and are marked for canonicalization. */
|
for (node = *nodep; node; node = node->next)
|
for (node = *nodep; node; node = node->next)
|
{
|
{
|
if (GET_CODE (node->loc) == VALUE)
|
if (GET_CODE (node->loc) == VALUE)
|
{
|
{
|
/* If this was marked during register canonicalization,
|
/* If this was marked during register canonicalization,
|
we know we have to canonicalize values. */
|
we know we have to canonicalize values. */
|
if (has_value)
|
if (has_value)
|
has_other = true;
|
has_other = true;
|
has_value = true;
|
has_value = true;
|
if (has_other)
|
if (has_other)
|
break;
|
break;
|
}
|
}
|
else
|
else
|
{
|
{
|
has_other = true;
|
has_other = true;
|
if (has_value)
|
if (has_value)
|
break;
|
break;
|
}
|
}
|
}
|
}
|
|
|
if (has_value && has_other)
|
if (has_value && has_other)
|
{
|
{
|
for (node = *nodep; node; node = node->next)
|
for (node = *nodep; node; node = node->next)
|
{
|
{
|
if (GET_CODE (node->loc) == VALUE)
|
if (GET_CODE (node->loc) == VALUE)
|
{
|
{
|
decl_or_value dv = dv_from_value (node->loc);
|
decl_or_value dv = dv_from_value (node->loc);
|
void **slot = NULL;
|
void **slot = NULL;
|
|
|
if (shared_hash_shared (dst->vars))
|
if (shared_hash_shared (dst->vars))
|
slot = shared_hash_find_slot_noinsert (dst->vars, dv);
|
slot = shared_hash_find_slot_noinsert (dst->vars, dv);
|
if (!slot)
|
if (!slot)
|
slot = shared_hash_find_slot_unshare (&dst->vars, dv,
|
slot = shared_hash_find_slot_unshare (&dst->vars, dv,
|
INSERT);
|
INSERT);
|
if (!*slot)
|
if (!*slot)
|
{
|
{
|
variable var = (variable) pool_alloc (onepart_pool
|
variable var = (variable) pool_alloc (onepart_pool
|
(ONEPART_VALUE));
|
(ONEPART_VALUE));
|
var->dv = dv;
|
var->dv = dv;
|
var->refcount = 1;
|
var->refcount = 1;
|
var->n_var_parts = 1;
|
var->n_var_parts = 1;
|
var->onepart = ONEPART_VALUE;
|
var->onepart = ONEPART_VALUE;
|
var->in_changed_variables = false;
|
var->in_changed_variables = false;
|
var->var_part[0].loc_chain = NULL;
|
var->var_part[0].loc_chain = NULL;
|
var->var_part[0].cur_loc = NULL;
|
var->var_part[0].cur_loc = NULL;
|
VAR_LOC_1PAUX (var) = NULL;
|
VAR_LOC_1PAUX (var) = NULL;
|
*slot = var;
|
*slot = var;
|
}
|
}
|
|
|
VALUE_RECURSED_INTO (node->loc) = true;
|
VALUE_RECURSED_INTO (node->loc) = true;
|
}
|
}
|
}
|
}
|
|
|
dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
|
dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
|
gcc_assert (*dstslot == dvar);
|
gcc_assert (*dstslot == dvar);
|
canonicalize_values_star (dstslot, dst);
|
canonicalize_values_star (dstslot, dst);
|
gcc_checking_assert (dstslot
|
gcc_checking_assert (dstslot
|
== shared_hash_find_slot_noinsert_1 (dst->vars,
|
== shared_hash_find_slot_noinsert_1 (dst->vars,
|
dv, dvhash));
|
dv, dvhash));
|
dvar = (variable)*dstslot;
|
dvar = (variable)*dstslot;
|
}
|
}
|
}
|
}
|
|
|
if (!onepart_variable_different_p (dvar, s2var))
|
if (!onepart_variable_different_p (dvar, s2var))
|
{
|
{
|
variable_htab_free (dvar);
|
variable_htab_free (dvar);
|
*dstslot = dvar = s2var;
|
*dstslot = dvar = s2var;
|
dvar->refcount++;
|
dvar->refcount++;
|
}
|
}
|
else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
|
else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
|
{
|
{
|
variable_htab_free (dvar);
|
variable_htab_free (dvar);
|
*dstslot = dvar = s1var;
|
*dstslot = dvar = s1var;
|
dvar->refcount++;
|
dvar->refcount++;
|
dst_can_be_shared = false;
|
dst_can_be_shared = false;
|
}
|
}
|
else
|
else
|
dst_can_be_shared = false;
|
dst_can_be_shared = false;
|
|
|
return 1;
|
return 1;
|
}
|
}
|
|
|
/* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
|
/* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
|
multi-part variable. Unions of multi-part variables and
|
multi-part variable. Unions of multi-part variables and
|
intersections of one-part ones will be handled in
|
intersections of one-part ones will be handled in
|
variable_merge_over_cur(). */
|
variable_merge_over_cur(). */
|
|
|
static int
|
static int
|
variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
|
variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
|
{
|
{
|
dataflow_set *dst = dsm->dst;
|
dataflow_set *dst = dsm->dst;
|
decl_or_value dv = s2var->dv;
|
decl_or_value dv = s2var->dv;
|
|
|
if (!s2var->onepart)
|
if (!s2var->onepart)
|
{
|
{
|
void **dstp = shared_hash_find_slot (dst->vars, dv);
|
void **dstp = shared_hash_find_slot (dst->vars, dv);
|
*dstp = s2var;
|
*dstp = s2var;
|
s2var->refcount++;
|
s2var->refcount++;
|
return 1;
|
return 1;
|
}
|
}
|
|
|
dsm->src_onepart_cnt++;
|
dsm->src_onepart_cnt++;
|
return 1;
|
return 1;
|
}
|
}
|
|
|
/* Combine dataflow set information from SRC2 into DST.  DST's
   previous contents are saved in a local copy and merged back in as
   the "current" set.  */

static void
dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
{
  dataflow_set cur = *dst;
  dataflow_set *src1 = &cur;
  struct dfset_merge dsm;
  int i;
  size_t src1_elems, src2_elems;
  htab_iterator hi;
  variable var;

  src1_elems = htab_elements (shared_hash_htab (src1->vars));
  src2_elems = htab_elements (shared_hash_htab (src2->vars));
  /* Reinitialize DST, then give it a fresh, unshared hash table sized
     for the larger input so the merge doesn't rehash repeatedly.  */
  dataflow_set_init (dst);
  dst->stack_adjust = cur.stack_adjust;
  shared_hash_destroy (dst->vars);
  dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
  dst->vars->refcount = 1;
  dst->vars->htab
    = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
		   variable_htab_eq, variable_htab_free);

  /* Merge the per-register attribute lists first.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);

  dsm.dst = dst;
  dsm.src = src2;
  dsm.cur = src1;
  dsm.src_onepart_cnt = 0;

  /* First bring over multi-part variables from SRC2, then merge
     everything from the saved current set.  */
  FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.src->vars), var, variable, hi)
    variable_merge_over_src (var, &dsm);
  FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.cur->vars), var, variable, hi)
    variable_merge_over_cur (var, &dsm);

  /* One-part variables seen only in SRC2 were intersected away, so
     the result cannot be shared with either input.  */
  if (dsm.src_onepart_cnt)
    dst_can_be_shared = false;

  dataflow_set_destroy (src1);
}
|
|
|
/* Mark register equivalences.  For each hard register, pick a
   canonical VALUE per machine mode among the values bound to that
   register, record mutual equivalences between the canonical value
   and the other entries, then star-canonicalize the affected
   variables.  */

static void
dataflow_set_equiv_regs (dataflow_set *set)
{
  int i;
  attrs list, *listp;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      /* Canonical VALUE chosen for each mode, indexed by mode.  */
      rtx canon[NUM_MACHINE_MODES];

      /* If the list is empty or one entry, no need to canonicalize
	 anything.  */
      if (set->regs[i] == NULL || set->regs[i]->next == NULL)
	continue;

      memset (canon, 0, sizeof (canon));

      /* Pass 1: elect the canonical value per mode using
	 canon_value_cmp.  */
      for (list = set->regs[i]; list; list = list->next)
	if (list->offset == 0 && dv_is_value_p (list->dv))
	  {
	    rtx val = dv_as_value (list->dv);
	    rtx *cvalp = &canon[(int)GET_MODE (val)];
	    rtx cval = *cvalp;

	    if (canon_value_cmp (val, cval))
	      *cvalp = val;
	  }

      /* Pass 2: record equivalences between each one-part entry and
	 the canonical value for its mode.  */
      for (list = set->regs[i]; list; list = list->next)
	if (list->offset == 0 && dv_onepart_p (list->dv))
	  {
	    rtx cval = canon[(int)GET_MODE (list->loc)];

	    if (!cval)
	      continue;

	    if (dv_is_value_p (list->dv))
	      {
		rtx val = dv_as_value (list->dv);

		if (val == cval)
		  continue;

		/* Mark for canonicalization and link VAL into the
		   canonical value's location list.  */
		VALUE_RECURSED_INTO (val) = true;
		set_variable_part (set, val, dv_from_value (cval), 0,
				   VAR_INIT_STATUS_INITIALIZED,
				   NULL, NO_INSERT);
	      }

	    VALUE_RECURSED_INTO (cval) = true;
	    set_variable_part (set, cval, list->dv, 0,
			       VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
	  }

      /* Pass 3: star-canonicalize the marked entries.
	 canonicalize_values_star may remove the current node from the
	 attrs list; if *listp no longer points at LIST, stay at the
	 same LISTP (LIST is set to NULL so the update expression keeps
	 LISTP unchanged).  */
      for (listp = &set->regs[i]; (list = *listp);
	   listp = list ? &list->next : listp)
	if (list->offset == 0 && dv_onepart_p (list->dv))
	  {
	    rtx cval = canon[(int)GET_MODE (list->loc)];
	    void **slot;

	    if (!cval)
	      continue;

	    if (dv_is_value_p (list->dv))
	      {
		rtx val = dv_as_value (list->dv);
		/* Only canonicalize values marked in pass 2.  */
		if (!VALUE_RECURSED_INTO (val))
		  continue;
	      }

	    slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
	    canonicalize_values_star (slot, set);
	    if (*listp != list)
	      list = NULL;
	  }
    }
}
|
|
|
/* Remove any redundant values in the location list of VAR, which must
   be unshared and 1-part.  Uses VALUE_RECURSED_INTO as a "seen" flag:
   the first occurrence of a VALUE marks it, later occurrences are
   deleted, and a second walk clears the flags again.  */

static void
remove_duplicate_values (variable var)
{
  location_chain node, *nodep;

  gcc_assert (var->onepart);
  gcc_assert (var->n_var_parts == 1);
  /* The list is modified in place, so the variable must not be
     shared with another set.  */
  gcc_assert (var->refcount == 1);

  for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
    {
      if (GET_CODE (node->loc) == VALUE)
	{
	  if (VALUE_RECURSED_INTO (node->loc))
	    {
	      /* Remove duplicate value node.  */
	      *nodep = node->next;
	      pool_free (loc_chain_pool, node);
	      continue;
	    }
	  else
	    VALUE_RECURSED_INTO (node->loc) = true;
	}
      nodep = &node->next;
    }

  /* Second walk: reset the marks so other users of
     VALUE_RECURSED_INTO see a clean state.  */
  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (GET_CODE (node->loc) == VALUE)
      {
	gcc_assert (VALUE_RECURSED_INTO (node->loc));
	VALUE_RECURSED_INTO (node->loc) = false;
      }
}
|
|
|
|
|
/* Hash table iteration argument passed to the variable_post_merge_*
   callbacks.  */
struct dfset_post_merge
{
  /* The new input set for the current block.  */
  dataflow_set *set;
  /* Pointer to the permanent input set for the current block, or
     NULL.  */
  dataflow_set **permp;
};
|
|
|
/* Create values for incoming expressions associated with one-part
   variables that don't have value numbers for them.  Hash-table
   traversal callback; INFO is a struct dfset_post_merge.  Always
   returns 1 to continue traversal.  */

static int
variable_post_merge_new_vals (void **slot, void *info)
{
  struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
  dataflow_set *set = dfpm->set;
  variable var = (variable)*slot;
  location_chain node;

  if (!var->onepart || !var->n_var_parts)
    return 1;

  gcc_assert (var->n_var_parts == 1);

  if (dv_is_decl_p (var->dv))
    {
      bool check_dupes = false;

    restart:
      for (node = var->var_part[0].loc_chain; node; node = node->next)
	{
	  if (GET_CODE (node->loc) == VALUE)
	    gcc_assert (!VALUE_RECURSED_INTO (node->loc));
	  else if (GET_CODE (node->loc) == REG)
	    {
	      attrs att, *attp, *curp = NULL;

	      /* We are about to rewrite NODE->loc in place, so the
		 variable must be unshared first; restart the walk on
		 the private copy.  */
	      if (var->refcount != 1)
		{
		  slot = unshare_variable (set, slot, var,
					   VAR_INIT_STATUS_INITIALIZED);
		  var = (variable)*slot;
		  goto restart;
		}

	      /* Look for a VALUE already bound to this register/mode;
		 remember (in CURP) the attr that refers to VAR's own
		 decl so it can be removed below.  */
	      for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
		   attp = &att->next)
		if (att->offset == 0
		    && GET_MODE (att->loc) == GET_MODE (node->loc))
		  {
		    if (dv_is_value_p (att->dv))
		      {
			rtx cval = dv_as_value (att->dv);
			node->loc = cval;
			check_dupes = true;
			break;
		      }
		    else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
		      curp = attp;
		  }

	      /* If the first scan broke out early, CURP may not be
		 set yet; find the decl's own attr now.  */
	      if (!curp)
		{
		  curp = attp;
		  while (*curp)
		    if ((*curp)->offset == 0
			&& GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
			&& dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
		      break;
		    else
		      curp = &(*curp)->next;
		  gcc_assert (*curp);
		}

	      /* No VALUE for this register yet: find or create one in
		 the permanent set.  */
	      if (!att)
		{
		  decl_or_value cdv;
		  rtx cval;

		  if (!*dfpm->permp)
		    {
		      *dfpm->permp = XNEW (dataflow_set);
		      dataflow_set_init (*dfpm->permp);
		    }

		  for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
		       att; att = att->next)
		    if (GET_MODE (att->loc) == GET_MODE (node->loc))
		      {
			gcc_assert (att->offset == 0
				    && dv_is_value_p (att->dv));
			val_reset (set, att->dv);
			break;
		      }

		  if (att)
		    {
		      cdv = att->dv;
		      cval = dv_as_value (cdv);
		    }
		  else
		    {
		      /* Create a unique value to hold this register,
			 that ought to be found and reused in
			 subsequent rounds.  */
		      cselib_val *v;
		      gcc_assert (!cselib_lookup (node->loc,
						  GET_MODE (node->loc), 0,
						  VOIDmode));
		      v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
					 VOIDmode);
		      cselib_preserve_value (v);
		      cselib_invalidate_rtx (node->loc);
		      cval = v->val_rtx;
		      cdv = dv_from_value (cval);
		      if (dump_file)
			fprintf (dump_file,
				 "Created new value %u:%u for reg %i\n",
				 v->uid, v->hash, REGNO (node->loc));
		    }

		  var_reg_decl_set (*dfpm->permp, node->loc,
				    VAR_INIT_STATUS_INITIALIZED,
				    cdv, 0, NULL, INSERT);

		  node->loc = cval;
		  check_dupes = true;
		}

	      /* Remove attribute referring to the decl, which now
		 uses the value for the register, already existing or
		 to be added when we bring perm in.  */
	      att = *curp;
	      *curp = att->next;
	      pool_free (attrs_pool, att);
	    }
	}

      /* Rewriting registers to VALUEs may have introduced duplicate
	 entries in the location list; clean them up.  */
      if (check_dupes)
	remove_duplicate_values (var);
    }

  return 1;
}
|
|
|
/* Reset values in the permanent set that are not associated with the
   chosen expression.  Hash-table traversal callback over the
   permanent set; INFO is a struct dfset_post_merge.  Always returns 1
   to continue traversal.  */

static int
variable_post_merge_perm_vals (void **pslot, void *info)
{
  struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
  dataflow_set *set = dfpm->set;
  variable pvar = (variable)*pslot, var;
  location_chain pnode;
  decl_or_value dv;
  attrs att;

  /* Permanent-set entries are VALUEs with exactly one location, which
     is a register (see how they are created in
     variable_post_merge_new_vals).  */
  gcc_assert (dv_is_value_p (pvar->dv)
	      && pvar->n_var_parts == 1);
  pnode = pvar->var_part[0].loc_chain;
  gcc_assert (pnode
	      && !pnode->next
	      && REG_P (pnode->loc));

  dv = pvar->dv;

  var = shared_hash_find (set->vars, dv);
  if (var)
    {
      /* Although variable_post_merge_new_vals may have made decls
	 non-star-canonical, values that pre-existed in canonical form
	 remain canonical, and newly-created values reference a single
	 REG, so they are canonical as well.  Since VAR has the
	 location list for a VALUE, using find_loc_in_1pdv for it is
	 fine, since VALUEs don't map back to DECLs.  */
      if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
	return 1;
      val_reset (set, dv);
    }

  /* Is some other VALUE already bound to this register/mode in the
     current set?  */
  for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
    if (att->offset == 0
	&& GET_MODE (att->loc) == GET_MODE (pnode->loc)
	&& dv_is_value_p (att->dv))
      break;

  /* If there is a value associated with this register already, create
     an equivalence.  */
  if (att && dv_as_value (att->dv) != dv_as_value (dv))
    {
      rtx cval = dv_as_value (att->dv);
      set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
      set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
			 NULL, INSERT);
    }
  else if (!att)
    {
      /* Otherwise claim the register for this value and bring the
	 permanent entry into the current set.  */
      attrs_list_insert (&set->regs[REGNO (pnode->loc)],
			 dv, 0, pnode->loc);
      variable_union (pvar, set);
    }

  return 1;
}
|
|
|
/* Post-merge fixups for SET: create VALUEs for incoming one-part
   expressions, fold in the permanent set *PERMP (if any), then
   star-canonicalize values and variables.  */

static void
dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
{
  struct dfset_post_merge dfpm;

  dfpm.set = set;
  dfpm.permp = permp;

  /* NOTE: the callbacks may unshare SET->vars, so look the table up
     afresh for each traversal rather than caching it.  */
  htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
		 &dfpm);
  if (*permp)
    htab_traverse (shared_hash_htab ((*permp)->vars),
		   variable_post_merge_perm_vals, &dfpm);
  htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
  htab_traverse (shared_hash_htab (set->vars), canonicalize_vars_star, set);
}
|
|
|
/* Return a node whose loc is a MEM that refers to EXPR in the
   location list of a one-part variable or value VAR, or in that of
   any values recursively mentioned in the location lists.  VAL must
   be a VALUE rtx (or NULL); VARS maps decl_or_value keys to
   variables.  VALUE_RECURSED_INTO is used as a visited flag to guard
   against cycles in the value graph and is restored before
   returning.  */

static location_chain
find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
{
  location_chain node;
  decl_or_value dv;
  variable var;
  location_chain where = NULL;

  if (!val)
    return NULL;

  gcc_assert (GET_CODE (val) == VALUE
	      && !VALUE_RECURSED_INTO (val));

  dv = dv_from_value (val);
  var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));

  if (!var)
    return NULL;

  gcc_assert (var->onepart);

  if (!var->n_var_parts)
    return NULL;

  /* Mark VAL so the recursive calls below don't revisit it.  */
  VALUE_RECURSED_INTO (val) = true;

  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (MEM_P (node->loc)
	&& MEM_EXPR (node->loc) == expr
	&& INT_MEM_OFFSET (node->loc) == 0)
      {
	where = node;
	break;
      }
    else if (GET_CODE (node->loc) == VALUE
	     && !VALUE_RECURSED_INTO (node->loc)
	     && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
      break;

  VALUE_RECURSED_INTO (val) = false;

  return where;
}
|
|
|
/* Return TRUE if the value of MEM may vary across a call. */
|
/* Return TRUE if the value of MEM may vary across a call. */
|
|
|
static bool
|
static bool
|
mem_dies_at_call (rtx mem)
|
mem_dies_at_call (rtx mem)
|
{
|
{
|
tree expr = MEM_EXPR (mem);
|
tree expr = MEM_EXPR (mem);
|
tree decl;
|
tree decl;
|
|
|
if (!expr)
|
if (!expr)
|
return true;
|
return true;
|
|
|
decl = get_base_address (expr);
|
decl = get_base_address (expr);
|
|
|
if (!decl)
|
if (!decl)
|
return true;
|
return true;
|
|
|
if (!DECL_P (decl))
|
if (!DECL_P (decl))
|
return true;
|
return true;
|
|
|
return (may_be_aliased (decl)
|
return (may_be_aliased (decl)
|
|| (!TREE_READONLY (decl) && is_global_var (decl)));
|
|| (!TREE_READONLY (decl) && is_global_var (decl)));
|
}
|
}
|
|
|
/* Remove all MEMs from the location list of a hash table entry for a
|
/* Remove all MEMs from the location list of a hash table entry for a
|
one-part variable, except those whose MEM attributes map back to
|
one-part variable, except those whose MEM attributes map back to
|
the variable itself, directly or within a VALUE. */
|
the variable itself, directly or within a VALUE. */
|
|
|
static int
|
/* htab_traverse callback for one-part DECL/DEBUG_EXPR variables:
   preserve MEM locations that refer to the variable's decl across a
   call, pulling in equivalent MEMs found through VALUEs, and drop MEM
   locations that die at the call.  DATA is the dataflow_set being
   adjusted.  Always returns 1 so traversal continues.  */

static int
dataflow_set_preserve_mem_locs (void **slot, void *data)
{
  dataflow_set *set = (dataflow_set *) data;
  variable var = (variable) *slot;

  if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
    {
      tree decl = dv_as_decl (var->dv);
      location_chain loc, *locp;
      bool changed = false;

      if (!var->n_var_parts)
        return 1;

      gcc_assert (var->n_var_parts == 1);

      if (shared_var_p (var, set->vars))
        {
          /* Scan first: only unshare the variable if something in its
             chain will actually be modified below.  */
          for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
            {
              /* We want to remove dying MEMs that don't refer to DECL.  */
              if (GET_CODE (loc->loc) == MEM
                  && (MEM_EXPR (loc->loc) != decl
                      || INT_MEM_OFFSET (loc->loc) != 0)
                  && !mem_dies_at_call (loc->loc))
                break;
              /* We want to move here MEMs that do refer to DECL.  */
              else if (GET_CODE (loc->loc) == VALUE
                       && find_mem_expr_in_1pdv (decl, loc->loc,
                                                 shared_hash_htab (set->vars)))
                break;
            }

          /* Nothing to change; leave the shared variable untouched.  */
          if (!loc)
            return 1;

          slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
          var = (variable)*slot;
          gcc_assert (var->n_var_parts == 1);
        }

      /* Walk the chain with a pointer-to-pointer so nodes can be
         unlinked in place.  */
      for (locp = &var->var_part[0].loc_chain, loc = *locp;
           loc; loc = *locp)
        {
          rtx old_loc = loc->loc;
          if (GET_CODE (old_loc) == VALUE)
            {
              location_chain mem_node
                = find_mem_expr_in_1pdv (decl, loc->loc,
                                         shared_hash_htab (set->vars));

              /* ??? This picks up only one out of multiple MEMs that
                 refer to the same variable.  Do we ever need to be
                 concerned about dealing with more than one, or, given
                 that they should all map to the same variable
                 location, their addresses will have been merged and
                 they will be regarded as equivalent?  */
              if (mem_node)
                {
                  /* Replace the VALUE with the decl-referring MEM.  */
                  loc->loc = mem_node->loc;
                  loc->set_src = mem_node->set_src;
                  loc->init = MIN (loc->init, mem_node->init);
                }
            }

          /* Keep this node if it is not a dying MEM foreign to DECL.  */
          if (GET_CODE (loc->loc) != MEM
              || (MEM_EXPR (loc->loc) == decl
                  && INT_MEM_OFFSET (loc->loc) == 0)
              || !mem_dies_at_call (loc->loc))
            {
              if (old_loc != loc->loc && emit_notes)
                {
                  /* The substituted location invalidates the cached
                     current location; force a note re-emission.  */
                  if (old_loc == var->var_part[0].cur_loc)
                    {
                      changed = true;
                      var->var_part[0].cur_loc = NULL;
                    }
                }
              locp = &loc->next;
              continue;
            }

          if (emit_notes)
            {
              if (old_loc == var->var_part[0].cur_loc)
                {
                  changed = true;
                  var->var_part[0].cur_loc = NULL;
                }
            }
          /* Unlink and free the dying MEM node.  */
          *locp = loc->next;
          pool_free (loc_chain_pool, loc);
        }

      if (!var->var_part[0].loc_chain)
        {
          var->n_var_parts--;
          changed = true;
        }
      if (changed)
        variable_was_changed (var, set);
    }

  return 1;
}
|
|
|
/* Remove all MEMs from the location list of a hash table entry for a
   value, when those MEMs die at a call.  htab_traverse callback;
   DATA is the dataflow_set.  Always returns 1 to continue traversal.  */

static int
dataflow_set_remove_mem_locs (void **slot, void *data)
{
  dataflow_set *set = (dataflow_set *) data;
  variable var = (variable) *slot;

  if (var->onepart == ONEPART_VALUE)
    {
      location_chain loc, *locp;
      bool changed = false;
      rtx cur_loc;

      gcc_assert (var->n_var_parts == 1);

      if (shared_var_p (var, set->vars))
        {
          /* Only unshare if there is at least one dying MEM to drop.  */
          for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
            if (GET_CODE (loc->loc) == MEM
                && mem_dies_at_call (loc->loc))
              break;

          if (!loc)
            return 1;

          slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
          var = (variable)*slot;
          gcc_assert (var->n_var_parts == 1);
        }

      /* The "current" location may live in the 1-part auxiliary data
         rather than in the variable part itself.  */
      if (VAR_LOC_1PAUX (var))
        cur_loc = VAR_LOC_FROM (var);
      else
        cur_loc = var->var_part[0].cur_loc;

      /* Unlink dying MEM nodes in place via a pointer-to-pointer.  */
      for (locp = &var->var_part[0].loc_chain, loc = *locp;
           loc; loc = *locp)
        {
          if (GET_CODE (loc->loc) != MEM
              || !mem_dies_at_call (loc->loc))
            {
              locp = &loc->next;
              continue;
            }

          *locp = loc->next;
          /* If we have deleted the location which was last emitted
             we have to emit new location so add the variable to set
             of changed variables.  */
          if (cur_loc == loc->loc)
            {
              changed = true;
              var->var_part[0].cur_loc = NULL;
              if (VAR_LOC_1PAUX (var))
                VAR_LOC_FROM (var) = NULL;
            }
          pool_free (loc_chain_pool, loc);
        }

      if (!var->var_part[0].loc_chain)
        {
          var->n_var_parts--;
          changed = true;
        }
      if (changed)
        variable_was_changed (var, set);
    }

  return 1;
}
|
|
|
/* Remove all variable-location information about call-clobbered
|
/* Remove all variable-location information about call-clobbered
|
registers, as well as associations between MEMs and VALUEs. */
|
registers, as well as associations between MEMs and VALUEs. */
|
|
|
static void
|
static void
|
dataflow_set_clear_at_call (dataflow_set *set)
|
dataflow_set_clear_at_call (dataflow_set *set)
|
{
|
{
|
int r;
|
int r;
|
|
|
for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
|
for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
|
if (TEST_HARD_REG_BIT (regs_invalidated_by_call, r))
|
if (TEST_HARD_REG_BIT (regs_invalidated_by_call, r))
|
var_regno_delete (set, r);
|
var_regno_delete (set, r);
|
|
|
if (MAY_HAVE_DEBUG_INSNS)
|
if (MAY_HAVE_DEBUG_INSNS)
|
{
|
{
|
set->traversed_vars = set->vars;
|
set->traversed_vars = set->vars;
|
htab_traverse (shared_hash_htab (set->vars),
|
htab_traverse (shared_hash_htab (set->vars),
|
dataflow_set_preserve_mem_locs, set);
|
dataflow_set_preserve_mem_locs, set);
|
set->traversed_vars = set->vars;
|
set->traversed_vars = set->vars;
|
htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
|
htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
|
set);
|
set);
|
set->traversed_vars = NULL;
|
set->traversed_vars = NULL;
|
}
|
}
|
}
|
}
|
|
|
static bool
|
static bool
|
variable_part_different_p (variable_part *vp1, variable_part *vp2)
|
variable_part_different_p (variable_part *vp1, variable_part *vp2)
|
{
|
{
|
location_chain lc1, lc2;
|
location_chain lc1, lc2;
|
|
|
for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
|
for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
|
{
|
{
|
for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
|
for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
|
{
|
{
|
if (REG_P (lc1->loc) && REG_P (lc2->loc))
|
if (REG_P (lc1->loc) && REG_P (lc2->loc))
|
{
|
{
|
if (REGNO (lc1->loc) == REGNO (lc2->loc))
|
if (REGNO (lc1->loc) == REGNO (lc2->loc))
|
break;
|
break;
|
}
|
}
|
if (rtx_equal_p (lc1->loc, lc2->loc))
|
if (rtx_equal_p (lc1->loc, lc2->loc))
|
break;
|
break;
|
}
|
}
|
if (!lc2)
|
if (!lc2)
|
return true;
|
return true;
|
}
|
}
|
return false;
|
return false;
|
}
|
}
|
|
|
/* Return true if one-part variables VAR1 and VAR2 are different.
|
/* Return true if one-part variables VAR1 and VAR2 are different.
|
They must be in canonical order. */
|
They must be in canonical order. */
|
|
|
static bool
|
static bool
|
onepart_variable_different_p (variable var1, variable var2)
|
onepart_variable_different_p (variable var1, variable var2)
|
{
|
{
|
location_chain lc1, lc2;
|
location_chain lc1, lc2;
|
|
|
if (var1 == var2)
|
if (var1 == var2)
|
return false;
|
return false;
|
|
|
gcc_assert (var1->n_var_parts == 1
|
gcc_assert (var1->n_var_parts == 1
|
&& var2->n_var_parts == 1);
|
&& var2->n_var_parts == 1);
|
|
|
lc1 = var1->var_part[0].loc_chain;
|
lc1 = var1->var_part[0].loc_chain;
|
lc2 = var2->var_part[0].loc_chain;
|
lc2 = var2->var_part[0].loc_chain;
|
|
|
gcc_assert (lc1 && lc2);
|
gcc_assert (lc1 && lc2);
|
|
|
while (lc1 && lc2)
|
while (lc1 && lc2)
|
{
|
{
|
if (loc_cmp (lc1->loc, lc2->loc))
|
if (loc_cmp (lc1->loc, lc2->loc))
|
return true;
|
return true;
|
lc1 = lc1->next;
|
lc1 = lc1->next;
|
lc2 = lc2->next;
|
lc2 = lc2->next;
|
}
|
}
|
|
|
return lc1 != lc2;
|
return lc1 != lc2;
|
}
|
}
|
|
|
/* Return true if variables VAR1 and VAR2 are different. */
|
/* Return true if variables VAR1 and VAR2 are different. */
|
|
|
static bool
|
static bool
|
variable_different_p (variable var1, variable var2)
|
variable_different_p (variable var1, variable var2)
|
{
|
{
|
int i;
|
int i;
|
|
|
if (var1 == var2)
|
if (var1 == var2)
|
return false;
|
return false;
|
|
|
if (var1->onepart != var2->onepart)
|
if (var1->onepart != var2->onepart)
|
return true;
|
return true;
|
|
|
if (var1->n_var_parts != var2->n_var_parts)
|
if (var1->n_var_parts != var2->n_var_parts)
|
return true;
|
return true;
|
|
|
if (var1->onepart && var1->n_var_parts)
|
if (var1->onepart && var1->n_var_parts)
|
{
|
{
|
gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
|
gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
|
&& var1->n_var_parts == 1);
|
&& var1->n_var_parts == 1);
|
/* One-part values have locations in a canonical order. */
|
/* One-part values have locations in a canonical order. */
|
return onepart_variable_different_p (var1, var2);
|
return onepart_variable_different_p (var1, var2);
|
}
|
}
|
|
|
for (i = 0; i < var1->n_var_parts; i++)
|
for (i = 0; i < var1->n_var_parts; i++)
|
{
|
{
|
if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
|
if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
|
return true;
|
return true;
|
if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
|
if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
|
return true;
|
return true;
|
if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
|
if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
|
return true;
|
return true;
|
}
|
}
|
return false;
|
return false;
|
}
|
}
|
|
|
/* Return true if dataflow sets OLD_SET and NEW_SET differ.  */

static bool
dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
{
  htab_iterator hi;
  variable var1;

  /* Sets sharing the same hash table are trivially equal.  */
  if (old_set->vars == new_set->vars)
    return false;

  /* Different element counts imply a difference without walking.  */
  if (htab_elements (shared_hash_htab (old_set->vars))
      != htab_elements (shared_hash_htab (new_set->vars)))
    return true;

  /* Equal counts: look up each old entry in the new table and compare.  */
  FOR_EACH_HTAB_ELEMENT (shared_hash_htab (old_set->vars), var1, variable, hi)
    {
      htab_t htab = shared_hash_htab (new_set->vars);
      variable var2 = (variable) htab_find_with_hash (htab, var1->dv,
                                                      dv_htab_hash (var1->dv));
      if (!var2)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "dataflow difference found: removal of:\n");
              dump_var (var1);
            }
          return true;
        }

      if (variable_different_p (var1, var2))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "dataflow difference found: "
                       "old and new follow:\n");
              dump_var (var1);
              dump_var (var2);
            }
          return true;
        }
    }

  /* No need to traverse the second hashtab, if both have the same number
     of elements and the second one had all entries found in the first one,
     then it can't have any extra entries.  */
  return false;
}
|
|
|
/* Free the contents of dataflow set SET. */
|
/* Free the contents of dataflow set SET. */
|
|
|
static void
|
static void
|
dataflow_set_destroy (dataflow_set *set)
|
dataflow_set_destroy (dataflow_set *set)
|
{
|
{
|
int i;
|
int i;
|
|
|
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
|
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
|
attrs_list_clear (&set->regs[i]);
|
attrs_list_clear (&set->regs[i]);
|
|
|
shared_hash_destroy (set->vars);
|
shared_hash_destroy (set->vars);
|
set->vars = NULL;
|
set->vars = NULL;
|
}
|
}
|
|
|
/* Return true if RTL X contains a SYMBOL_REF. */
|
/* Return true if RTL X contains a SYMBOL_REF. */
|
|
|
static bool
|
static bool
|
contains_symbol_ref (rtx x)
|
contains_symbol_ref (rtx x)
|
{
|
{
|
const char *fmt;
|
const char *fmt;
|
RTX_CODE code;
|
RTX_CODE code;
|
int i;
|
int i;
|
|
|
if (!x)
|
if (!x)
|
return false;
|
return false;
|
|
|
code = GET_CODE (x);
|
code = GET_CODE (x);
|
if (code == SYMBOL_REF)
|
if (code == SYMBOL_REF)
|
return true;
|
return true;
|
|
|
fmt = GET_RTX_FORMAT (code);
|
fmt = GET_RTX_FORMAT (code);
|
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
|
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
|
{
|
{
|
if (fmt[i] == 'e')
|
if (fmt[i] == 'e')
|
{
|
{
|
if (contains_symbol_ref (XEXP (x, i)))
|
if (contains_symbol_ref (XEXP (x, i)))
|
return true;
|
return true;
|
}
|
}
|
else if (fmt[i] == 'E')
|
else if (fmt[i] == 'E')
|
{
|
{
|
int j;
|
int j;
|
for (j = 0; j < XVECLEN (x, i); j++)
|
for (j = 0; j < XVECLEN (x, i); j++)
|
if (contains_symbol_ref (XVECEXP (x, i, j)))
|
if (contains_symbol_ref (XVECEXP (x, i, j)))
|
return true;
|
return true;
|
}
|
}
|
}
|
}
|
|
|
return false;
|
return false;
|
}
|
}
|
|
|
/* Shall EXPR be tracked?  If NEED_RTL, only track it when it also has
   a name and RTL assigned.  Returns nonzero when EXPR should be
   tracked, and clears its DECL_CHANGED flags as a side effect.  */

static bool
track_expr_p (tree expr, bool need_rtl)
{
  rtx decl_rtl;
  tree realdecl;

  if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
    return DECL_RTL_SET_P (expr);

  /* If EXPR is not a parameter or a variable do not track it.  */
  if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
    return 0;

  /* It also must have a name...  */
  if (!DECL_NAME (expr) && need_rtl)
    return 0;

  /* ... and a RTL assigned to it.  */
  decl_rtl = DECL_RTL_IF_SET (expr);
  if (!decl_rtl && need_rtl)
    return 0;

  /* If this expression is really a debug alias of some other declaration, we
     don't need to track this expression if the ultimate declaration is
     ignored.  */
  realdecl = expr;
  if (DECL_DEBUG_EXPR_IS_FROM (realdecl))
    {
      realdecl = DECL_DEBUG_EXPR (realdecl);
      if (realdecl == NULL_TREE)
        realdecl = expr;
      else if (!DECL_P (realdecl))
        {
          if (handled_component_p (realdecl))
            {
              HOST_WIDE_INT bitsize, bitpos, maxsize;
              tree innerdecl
                = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
                                           &maxsize);
              /* Only accept a small, constant-sized piece of a
                 non-static tracked decl.  NOTE(review): the 256-bit
                 bound presumably limits pieces to what location
                 expressions can describe -- confirm before changing.  */
              if (!DECL_P (innerdecl)
                  || DECL_IGNORED_P (innerdecl)
                  || TREE_STATIC (innerdecl)
                  || bitsize <= 0
                  || bitpos + bitsize > 256
                  || bitsize != maxsize)
                return 0;
              else
                realdecl = expr;
            }
          else
            return 0;
        }
    }

  /* Do not track EXPR if its ultimate REALDECL should be ignored for
     debugging purposes.  */
  if (DECL_IGNORED_P (realdecl))
    return 0;

  /* Do not track global variables until we are able to emit correct location
     list for them.  */
  if (TREE_STATIC (realdecl))
    return 0;

  /* When the EXPR is a DECL for alias of some variable (see example)
     the TREE_STATIC flag is not used.  Disable tracking all DECLs whose
     DECL_RTL contains SYMBOL_REF.

     Example:
     extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
     char **_dl_argv;
  */
  if (decl_rtl && MEM_P (decl_rtl)
      && contains_symbol_ref (XEXP (decl_rtl, 0)))
    return 0;

  /* If RTX is a memory it should not be very large (because it would be
     an array or struct).  */
  if (decl_rtl && MEM_P (decl_rtl))
    {
      /* Do not track structures and arrays.  */
      if (GET_MODE (decl_rtl) == BLKmode
          || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
        return 0;
      if (MEM_SIZE_KNOWN_P (decl_rtl)
          && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
        return 0;
    }

  /* EXPR will be tracked: reset the "changed" markers on both decls.  */
  DECL_CHANGED (expr) = 0;
  DECL_CHANGED (realdecl) = 0;
  return 1;
}
|
|
|
/* Determine whether a given LOC refers to the same variable part as
|
/* Determine whether a given LOC refers to the same variable part as
|
EXPR+OFFSET. */
|
EXPR+OFFSET. */
|
|
|
static bool
|
static bool
|
same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
|
same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
|
{
|
{
|
tree expr2;
|
tree expr2;
|
HOST_WIDE_INT offset2;
|
HOST_WIDE_INT offset2;
|
|
|
if (! DECL_P (expr))
|
if (! DECL_P (expr))
|
return false;
|
return false;
|
|
|
if (REG_P (loc))
|
if (REG_P (loc))
|
{
|
{
|
expr2 = REG_EXPR (loc);
|
expr2 = REG_EXPR (loc);
|
offset2 = REG_OFFSET (loc);
|
offset2 = REG_OFFSET (loc);
|
}
|
}
|
else if (MEM_P (loc))
|
else if (MEM_P (loc))
|
{
|
{
|
expr2 = MEM_EXPR (loc);
|
expr2 = MEM_EXPR (loc);
|
offset2 = INT_MEM_OFFSET (loc);
|
offset2 = INT_MEM_OFFSET (loc);
|
}
|
}
|
else
|
else
|
return false;
|
return false;
|
|
|
if (! expr2 || ! DECL_P (expr2))
|
if (! expr2 || ! DECL_P (expr2))
|
return false;
|
return false;
|
|
|
expr = var_debug_decl (expr);
|
expr = var_debug_decl (expr);
|
expr2 = var_debug_decl (expr2);
|
expr2 = var_debug_decl (expr2);
|
|
|
return (expr == expr2 && offset == offset2);
|
return (expr == expr2 && offset == offset2);
|
}
|
}
|
|
|
/* LOC is a REG or MEM that we would like to track if possible.
   If EXPR is null, we don't know what expression LOC refers to,
   otherwise it refers to EXPR + OFFSET.  STORE_REG_P is true if
   LOC is an lvalue register.

   Return true if EXPR is nonnull and if LOC, or some lowpart of it,
   is something we can track.  When returning true, store the mode of
   the lowpart we can track in *MODE_OUT (if nonnull) and its offset
   from EXPR in *OFFSET_OUT (if nonnull).  */

static bool
track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
             enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
{
  enum machine_mode mode;

  if (expr == NULL || !track_expr_p (expr, true))
    return false;

  /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
     whole subreg, but only the old inner part is really relevant.  */
  mode = GET_MODE (loc);
  if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
    {
      enum machine_mode pseudo_mode;

      pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
      if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
        {
          /* Narrow to the original pseudo's mode, adjusting OFFSET to
             point at its lowpart within LOC.  */
          offset += byte_lowpart_offset (pseudo_mode, mode);
          mode = pseudo_mode;
        }
    }

  /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
     Do the same if we are storing to a register and EXPR occupies
     the whole of register LOC; in that case, the whole of EXPR is
     being changed.  We exclude complex modes from the second case
     because the real and imaginary parts are represented as separate
     pseudo registers, even if the whole complex value fits into one
     hard register.  */
  if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
       || (store_reg_p
           && !COMPLEX_MODE_P (DECL_MODE (expr))
           && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
      && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
    {
      mode = DECL_MODE (expr);
      offset = 0;
    }

  /* Reject offsets outside the representable range of variable parts.  */
  if (offset < 0 || offset >= MAX_VAR_PARTS)
    return false;

  if (mode_out)
    *mode_out = mode;
  if (offset_out)
    *offset_out = offset;
  return true;
}
|
|
|
/* Return the MODE lowpart of LOC, or null if LOC is not something we
   want to track.  When returning nonnull, make sure that the attributes
   on the returned value are updated.  */

static rtx
var_lowpart (enum machine_mode mode, rtx loc)
{
  unsigned int offset, reg_offset, regno;

  /* Only registers and memory can be tracked.  */
  if (!REG_P (loc) && !MEM_P (loc))
    return NULL;

  /* Already the requested mode: nothing to adjust.  */
  if (GET_MODE (loc) == mode)
    return loc;

  offset = byte_lowpart_offset (mode, GET_MODE (loc));

  if (MEM_P (loc))
    return adjust_address_nv (loc, mode, offset);

  /* For a register, compute the hard regno holding the lowpart and
     build a REG carrying the adjusted decl/offset attributes.  */
  reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
  regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
                                             reg_offset, mode);
  return gen_rtx_REG_offset (loc, mode, regno, offset);
}
|
|
|
/* Carry information about uses and stores while walking rtx.  */

struct count_use_info
{
  /* The insn where the RTX is.  */
  rtx insn;

  /* The basic block where insn is.  */
  basic_block bb;

  /* The array of n_sets sets in the insn, as determined by cselib.  */
  struct cselib_set *sets;
  int n_sets;

  /* True if we're counting stores, false otherwise.  */
  bool store_p;
};
|
|
|
/* Find a VALUE corresponding to X.  Returns NULL when no value is
   available (no sets in CUI, BLKmode memset/memcpy dests, or a store
   dest not found in the sets array).  */

static inline cselib_val *
find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
{
  int i;

  if (cui->sets)
    {
      /* This is called after uses are set up and before stores are
         processed by cselib, so it's safe to look up srcs, but not
         dsts.  So we look up expressions that appear in srcs or in
         dest expressions, but we search the sets array for dests of
         stores.  */
      if (cui->store_p)
        {
          /* Some targets represent memset and memcpy patterns
             by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
             (set (mem:BLK ...) (const_int ...)) or
             (set (mem:BLK ...) (mem:BLK ...)).  Don't return anything
             in that case, otherwise we end up with mode mismatches.  */
          if (mode == BLKmode && MEM_P (x))
            return NULL;
          for (i = 0; i < cui->n_sets; i++)
            if (cui->sets[i].dest == x)
              return cui->sets[i].src_elt;
        }
      else
        return cselib_lookup (x, mode, 0, VOIDmode);
    }

  return NULL;
}
|
|
|
/* Helper function to get mode of MEM's address. */
|
/* Helper function to get mode of MEM's address. */
|
|
|
static inline enum machine_mode
|
static inline enum machine_mode
|
get_address_mode (rtx mem)
|
get_address_mode (rtx mem)
|
{
|
{
|
enum machine_mode mode = GET_MODE (XEXP (mem, 0));
|
enum machine_mode mode = GET_MODE (XEXP (mem, 0));
|
if (mode != VOIDmode)
|
if (mode != VOIDmode)
|
return mode;
|
return mode;
|
return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
|
return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
|
}
|
}
|
|
|
/* Replace all registers and addresses in an expression with VALUE
|
/* Replace all registers and addresses in an expression with VALUE
|
expressions that map back to them, unless the expression is a
|
expressions that map back to them, unless the expression is a
|
register. If no mapping is or can be performed, returns NULL. */
|
register. If no mapping is or can be performed, returns NULL. */
|
|
|
static rtx
|
static rtx
|
replace_expr_with_values (rtx loc)
|
replace_expr_with_values (rtx loc)
|
{
|
{
|
if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
|
if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
|
return NULL;
|
return NULL;
|
else if (MEM_P (loc))
|
else if (MEM_P (loc))
|
{
|
{
|
cselib_val *addr = cselib_lookup (XEXP (loc, 0),
|
cselib_val *addr = cselib_lookup (XEXP (loc, 0),
|
get_address_mode (loc), 0,
|
get_address_mode (loc), 0,
|
GET_MODE (loc));
|
GET_MODE (loc));
|
if (addr)
|
if (addr)
|
return replace_equiv_address_nv (loc, addr->val_rtx);
|
return replace_equiv_address_nv (loc, addr->val_rtx);
|
else
|
else
|
return NULL;
|
return NULL;
|
}
|
}
|
else
|
else
|
return cselib_subst_to_values (loc, VOIDmode);
|
return cselib_subst_to_values (loc, VOIDmode);
|
}
|
}
|
|
|
/* Return true if *X is a DEBUG_EXPR. Usable as an argument to
|
/* Return true if *X is a DEBUG_EXPR. Usable as an argument to
|
for_each_rtx to tell whether there are any DEBUG_EXPRs within
|
for_each_rtx to tell whether there are any DEBUG_EXPRs within
|
RTX. */
|
RTX. */
|
|
|
static int
|
static int
|
rtx_debug_expr_p (rtx *x, void *data ATTRIBUTE_UNUSED)
|
rtx_debug_expr_p (rtx *x, void *data ATTRIBUTE_UNUSED)
|
{
|
{
|
rtx loc = *x;
|
rtx loc = *x;
|
|
|
return GET_CODE (loc) == DEBUG_EXPR;
|
return GET_CODE (loc) == DEBUG_EXPR;
|
}
|
}
|
|
|
/* Determine what kind of micro operation to choose for a USE.  Return
   MO_CLOBBER if no micro operation is to be generated.  If MODEP is
   non-NULL, store there the mode that should be used for the
   micro operation (the mode of LOC, possibly narrowed by
   track_loc_p).  CUI may be NULL; when it is, only the plain
   MO_USE/MO_USE_NO_VAR/MO_CLOBBER classification below is done.  */

static enum micro_operation_type
use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
{
  tree expr;

  /* With sets available we can classify into the value-tracking
     micro operations (MO_VAL_*) as well.  */
  if (cui && cui->sets)
    {
      if (GET_CODE (loc) == VAR_LOCATION)
	{
	  if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
	    {
	      rtx ploc = PAT_VAR_LOCATION_LOC (loc);
	      if (! VAR_LOC_UNKNOWN_P (ploc))
		{
		  /* Lookup with create=1 so the location gets a VALUE
		     even if it didn't have one yet.  */
		  cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
						   VOIDmode);

		  /* ??? flag_float_store and volatile mems are never
		     given values, but we could in theory use them for
		     locations.  */
		  gcc_assert (val || 1);
		}
	      return MO_VAL_LOC;
	    }
	  else
	    return MO_CLOBBER;
	}

      if (REG_P (loc) || MEM_P (loc))
	{
	  if (modep)
	    *modep = GET_MODE (loc);
	  if (cui->store_p)
	    {
	      /* A store to a REG always becomes MO_VAL_SET; a store to
		 a MEM only if both the MEM and its address have known
		 values.  */
	      if (REG_P (loc)
		  || (find_use_val (loc, GET_MODE (loc), cui)
		      && cselib_lookup (XEXP (loc, 0),
					get_address_mode (loc), 0,
					GET_MODE (loc))))
		return MO_VAL_SET;
	    }
	  else
	    {
	      cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);

	      /* Already-preserved values need no MO_VAL_USE; fall
		 through to the plain classification below.  */
	      if (val && !cselib_preserved_value_p (val))
		return MO_VAL_USE;
	    }
	}
    }

  if (REG_P (loc))
    {
      gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);

      /* The CFA base register is handled separately; never track it
	 as a variable location.  */
      if (loc == cfa_base_rtx)
	return MO_CLOBBER;
      expr = REG_EXPR (loc);

      if (!expr)
	return MO_USE_NO_VAR;
      else if (target_for_debug_bind (var_debug_decl (expr)))
	return MO_CLOBBER;
      else if (track_loc_p (loc, expr, REG_OFFSET (loc),
			    false, modep, NULL))
	return MO_USE;
      else
	return MO_USE_NO_VAR;
    }
  else if (MEM_P (loc))
    {
      expr = MEM_EXPR (loc);

      if (!expr)
	return MO_CLOBBER;
      else if (target_for_debug_bind (var_debug_decl (expr)))
	return MO_CLOBBER;
      else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
			    false, modep, NULL)
	       /* Multi-part variables shouldn't refer to one-part
		  variable names such as VALUEs (never happens) or
		  DEBUG_EXPRs (only happens in the presence of debug
		  insns).  */
	       && (!MAY_HAVE_DEBUG_INSNS
		   || !for_each_rtx (&XEXP (loc, 0), rtx_debug_expr_p, NULL)))
	return MO_USE;
      else
	return MO_CLOBBER;
    }

  return MO_CLOBBER;
}
|
|
|
/* Log to OUT information about micro-operation MOPT involving X in
   INSN of BB.  The op number printed is the current length of BB's
   micro-operation vector, i.e. the index the next pushed op will
   get.  */

static inline void
log_op_type (rtx x, basic_block bb, rtx insn,
	     enum micro_operation_type mopt, FILE *out)
{
  fprintf (out, "bb %i op %i insn %i %s ",
	   bb->index, VEC_length (micro_operation, VTI (bb)->mos),
	   INSN_UID (insn), micro_operation_type_name[mopt]);
  print_inline_rtx (out, x, 2);
  fputc ('\n', out);
}
|
|
|
/* The four macros below reuse otherwise-unused rtx flag bits
   (volatil, used, jump, unchanging) on CONCAT rtxes built for
   micro operations; RTL_FLAG_CHECK1 enforces the CONCAT code.  */

/* Tell whether the CONCAT used to holds a VALUE and its location
   needs value resolution, i.e., an attempt of mapping the location
   back to other incoming values.  */
#define VAL_NEEDS_RESOLUTION(x) \
  (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
/* Whether the location in the CONCAT is a tracked expression, that
   should also be handled like a MO_USE.  */
#define VAL_HOLDS_TRACK_EXPR(x) \
  (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
/* Whether the location in the CONCAT should be handled like a MO_COPY
   as well.  */
#define VAL_EXPR_IS_COPIED(x) \
  (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
/* Whether the location in the CONCAT should be handled like a
   MO_CLOBBER as well.  */
#define VAL_EXPR_IS_CLOBBERED(x) \
  (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
|
|
|
/* All preserved VALUEs.  */
static VEC (rtx, heap) *preserved_values;

/* Ensure VAL is preserved and remember it in a vector for vt_emit_notes.  */

static void
preserve_value (cselib_val *val)
{
  /* Mark it preserved in cselib, then record its VALUE rtx so
     vt_emit_notes can walk all preserved values later.  */
  cselib_preserve_value (val);
  VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
}
|
|
|
/* Helper function for MO_VAL_LOC handling. Return non-zero if
|
/* Helper function for MO_VAL_LOC handling. Return non-zero if
|
any rtxes not suitable for CONST use not replaced by VALUEs
|
any rtxes not suitable for CONST use not replaced by VALUEs
|
are discovered. */
|
are discovered. */
|
|
|
static int
|
static int
|
non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
|
non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
|
{
|
{
|
if (*x == NULL_RTX)
|
if (*x == NULL_RTX)
|
return 0;
|
return 0;
|
|
|
switch (GET_CODE (*x))
|
switch (GET_CODE (*x))
|
{
|
{
|
case REG:
|
case REG:
|
case DEBUG_EXPR:
|
case DEBUG_EXPR:
|
case PC:
|
case PC:
|
case SCRATCH:
|
case SCRATCH:
|
case CC0:
|
case CC0:
|
case ASM_INPUT:
|
case ASM_INPUT:
|
case ASM_OPERANDS:
|
case ASM_OPERANDS:
|
return 1;
|
return 1;
|
case MEM:
|
case MEM:
|
return !MEM_READONLY_P (*x);
|
return !MEM_READONLY_P (*x);
|
default:
|
default:
|
return 0;
|
return 0;
|
}
|
}
|
}
|
}
|
|
|
/* Add uses (register and memory references) LOC which will be tracked
   to VTI (bb)->mos.  INSN is instruction which the LOC is part of.
   DATA is really a struct count_use_info *.  Intended as a
   for_each_rtx callback; always returns 0 so traversal continues.  */

static int
add_uses (rtx *ploc, void *data)
{
  rtx loc = *ploc;
  enum machine_mode mode = VOIDmode;
  struct count_use_info *cui = (struct count_use_info *)data;
  enum micro_operation_type type = use_type (loc, cui, &mode);

  if (type != MO_CLOBBER)
    {
      basic_block bb = cui->bb;
      micro_operation mo;

      mo.type = type;
      mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
      mo.insn = cui->insn;

      if (type == MO_VAL_LOC)
	{
	  rtx oloc = loc;
	  rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
	  cselib_val *val;

	  gcc_assert (cui->sets);

	  /* If VLOC is a MEM with a non-trivial address, preserve the
	     VALUE of the address so it survives to note emission.  */
	  if (MEM_P (vloc)
	      && !REG_P (XEXP (vloc, 0))
	      && !MEM_P (XEXP (vloc, 0)))
	    {
	      rtx mloc = vloc;
	      enum machine_mode address_mode = get_address_mode (mloc);
	      /* NB: this VAL intentionally shadows the outer one.  */
	      cselib_val *val
		= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
				 GET_MODE (mloc));

	      if (val && !cselib_preserved_value_p (val))
		preserve_value (val);
	    }

	  if (CONSTANT_P (vloc)
	      && (GET_CODE (vloc) != CONST
		  || for_each_rtx (&vloc, non_suitable_const, NULL)))
	    /* For constants don't look up any value.  */;
	  else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
		   && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
	    {
	      enum machine_mode mode2;
	      enum micro_operation_type type2;
	      rtx nloc = NULL;
	      bool resolvable = REG_P (vloc) || MEM_P (vloc);

	      if (resolvable)
		nloc = replace_expr_with_values (vloc);

	      if (nloc)
		{
		  /* Copy before mutating so the original insn pattern
		     is left untouched.  */
		  oloc = shallow_copy_rtx (oloc);
		  PAT_VAR_LOCATION_LOC (oloc) = nloc;
		}

	      oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);

	      type2 = use_type (vloc, 0, &mode2);

	      gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
			  || type2 == MO_CLOBBER);

	      if (type2 == MO_CLOBBER
		  && !cselib_preserved_value_p (val))
		{
		  VAL_NEEDS_RESOLUTION (oloc) = resolvable;
		  preserve_value (val);
		}
	    }
	  else if (!VAR_LOC_UNKNOWN_P (vloc))
	    {
	      /* No usable value: downgrade the location to unknown.  */
	      oloc = shallow_copy_rtx (oloc);
	      PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
	    }

	  mo.u.loc = oloc;
	}
      else if (type == MO_VAL_USE)
	{
	  enum machine_mode mode2 = VOIDmode;
	  enum micro_operation_type type2;
	  cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
	  rtx vloc, oloc = loc, nloc;

	  gcc_assert (cui->sets);

	  /* Same address-VALUE preservation as in the MO_VAL_LOC
	     case above.  */
	  if (MEM_P (oloc)
	      && !REG_P (XEXP (oloc, 0))
	      && !MEM_P (XEXP (oloc, 0)))
	    {
	      rtx mloc = oloc;
	      enum machine_mode address_mode = get_address_mode (mloc);
	      /* NB: this VAL intentionally shadows the outer one.  */
	      cselib_val *val
		= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
				 GET_MODE (mloc));

	      if (val && !cselib_preserved_value_p (val))
		preserve_value (val);
	    }

	  type2 = use_type (loc, 0, &mode2);

	  gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
		      || type2 == MO_CLOBBER);

	  if (type2 == MO_USE)
	    vloc = var_lowpart (mode2, loc);
	  else
	    vloc = oloc;

	  /* The loc of a MO_VAL_USE may have two forms:

	     (concat val src): val is at src, a value-based
	     representation.

	     (concat (concat val use) src): same as above, with use as
	     the MO_USE tracked value, if it differs from src.

	  */

	  gcc_checking_assert (REG_P (loc) || MEM_P (loc));
	  nloc = replace_expr_with_values (loc);
	  if (!nloc)
	    nloc = oloc;

	  if (vloc != nloc)
	    oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
	  else
	    oloc = val->val_rtx;

	  mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);

	  if (type2 == MO_USE)
	    VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
	  if (!cselib_preserved_value_p (val))
	    {
	      VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
	      preserve_value (val);
	    }
	}
      else
	gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);

      if (dump_file && (dump_flags & TDF_DETAILS))
	log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
      VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
    }

  return 0;
}
|
|
|
/* Helper function for finding all uses of REG/MEM in X in insn INSN.
   CUI is really a struct count_use_info *, forwarded to add_uses for
   every sub-rtx of *X.  */

static void
add_uses_1 (rtx *x, void *cui)
{
  for_each_rtx (x, add_uses, cui);
}
|
|
|
/* This is the value used during expansion of locations.  We want it
   to be unbounded, so that variables expanded deep in a recursion
   nest are fully evaluated, so that their values are cached
   correctly.  We avoid recursion cycles through other means, and we
   don't unshare RTL, so excess complexity is not a problem.  */
#define EXPR_DEPTH (INT_MAX)
/* We use this to keep too-complex expressions from being emitted as
   location notes, and then to debug information.  Users can trade
   compile time for ridiculously complex expressions, although they're
   seldom useful, and they may often have to be discarded as not
   representable anyway.  This limit comes from the
   --param max-vartrack-expr-depth command-line parameter.  */
#define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
|
|
|
/* Attempt to reverse the EXPR operation in the debug info and record
   it in the cselib table.  Say for reg1 = reg2 + 6 even when reg2 is
   no longer live we can express its value as VAL - 6.  EXPR must be a
   SET whose destination is a REG with the same mode as VAL; only
   PLUS, MINUS, XOR, NOT, NEG, SIGN_EXTEND and ZERO_EXTEND sources in
   scalar integer modes are reversible.  */

static void
reverse_op (rtx val, const_rtx expr, rtx insn)
{
  rtx src, arg, ret;
  cselib_val *v;
  struct elt_loc_list *l;
  enum rtx_code code;

  if (GET_CODE (expr) != SET)
    return;

  if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
    return;

  /* First filter: the operand we would express VAL in terms of must
     be a REG (or for extensions, a REG or MEM).  */
  src = SET_SRC (expr);
  switch (GET_CODE (src))
    {
    case PLUS:
    case MINUS:
    case XOR:
    case NOT:
    case NEG:
      if (!REG_P (XEXP (src, 0)))
	return;
      break;
    case SIGN_EXTEND:
    case ZERO_EXTEND:
      if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
	return;
      break;
    default:
      return;
    }

  if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
    return;

  v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
  if (!v || !cselib_preserved_value_p (v))
    return;

  /* Use canonical V to avoid creating multiple redundant expressions
     for different VALUES equivalent to V.  */
  v = canonical_cselib_val (v);

  /* Adding a reverse op isn't useful if V already has an always valid
     location.  Ignore ENTRY_VALUE, while it is always constant, we should
     prefer non-ENTRY_VALUE locations whenever possible.  */
  for (l = v->locs; l; l = l->next)
    if (CONSTANT_P (l->loc)
	&& (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
      return;

  /* Second pass: build the reverse expression RET.  */
  switch (GET_CODE (src))
    {
    case NOT:
    case NEG:
      if (GET_MODE (v->val_rtx) != GET_MODE (val))
	return;
      /* NOT and NEG are their own inverses.  */
      ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
      break;
    case SIGN_EXTEND:
    case ZERO_EXTEND:
      /* The inverse of an extension is taking the lowpart.  */
      ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
      break;
    case XOR:
      code = XOR;
      goto binary;
    case PLUS:
      code = MINUS;
      goto binary;
    case MINUS:
      code = PLUS;
      goto binary;
    binary:
      if (GET_MODE (v->val_rtx) != GET_MODE (val))
	return;
      arg = XEXP (src, 1);
      /* The second operand must reduce to a constant (CONST_INT or
	 SYMBOL_REF), possibly after cselib expansion.  */
      if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
	{
	  arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
	  if (arg == NULL_RTX)
	    return;
	  if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
	    return;
	}
      ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
      if (ret == val)
	/* Ensure ret isn't VALUE itself (which can happen e.g. for
	   (plus (reg1) (reg2)) when reg2 is known to be 0), as that
	   breaks a lot of routines during var-tracking.  */
	ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
      break;
    default:
      gcc_unreachable ();
    }

  cselib_add_permanent_equiv (v, ret, insn);
}
|
|
|
/* Add stores (register and memory references) LOC which will be tracked
   to VTI (bb)->mos.  EXPR is the RTL expression containing the store.
   CUIP->insn is instruction which the LOC is part of.  CUIP is really
   a struct count_use_info *; this is intended as a note_stores
   callback.  */

static void
add_stores (rtx loc, const_rtx expr, void *cuip)
{
  enum machine_mode mode = VOIDmode, mode2;
  struct count_use_info *cui = (struct count_use_info *)cuip;
  basic_block bb = cui->bb;
  micro_operation mo;
  rtx oloc = loc, nloc, src = NULL;
  enum micro_operation_type type = use_type (loc, cui, &mode);
  bool track_p = false;
  cselib_val *v;
  bool resolve, preserve;

  if (type == MO_CLOBBER)
    return;

  mode2 = mode;

  if (REG_P (loc))
    {
      gcc_assert (loc != cfa_base_rtx);
      /* A CLOBBER, or a register that is not itself tracked, is
	 recorded as MO_CLOBBER (possibly carrying the SET so that
	 value tracking below still sees the source).  */
      if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
	  || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
	  || GET_CODE (expr) == CLOBBER)
	{
	  mo.type = MO_CLOBBER;
	  mo.u.loc = loc;
	  if (GET_CODE (expr) == SET
	      && SET_DEST (expr) == loc
	      && !unsuitable_loc (SET_SRC (expr))
	      && find_use_val (loc, mode, cui))
	    {
	      gcc_checking_assert (type == MO_VAL_SET);
	      mo.u.loc = gen_rtx_SET (VOIDmode, loc, SET_SRC (expr));
	    }
	}
      else
	{
	  if (GET_CODE (expr) == SET
	      && SET_DEST (expr) == loc
	      && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
	    src = var_lowpart (mode2, SET_SRC (expr));
	  loc = var_lowpart (mode2, loc);

	  if (src == NULL)
	    {
	      mo.type = MO_SET;
	      mo.u.loc = loc;
	    }
	  else
	    {
	      rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
	      /* Copies within the same variable part are MO_COPY.  */
	      if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
		mo.type = MO_COPY;
	      else
		mo.type = MO_SET;
	      mo.u.loc = xexpr;
	    }
	}
      mo.insn = cui->insn;
    }
  else if (MEM_P (loc)
	   && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
	       || cui->sets))
    {
      /* For a MEM store with a non-trivial address, preserve the
	 VALUE of the address.  */
      if (MEM_P (loc) && type == MO_VAL_SET
	  && !REG_P (XEXP (loc, 0))
	  && !MEM_P (XEXP (loc, 0)))
	{
	  rtx mloc = loc;
	  enum machine_mode address_mode = get_address_mode (mloc);
	  cselib_val *val = cselib_lookup (XEXP (mloc, 0),
					   address_mode, 0,
					   GET_MODE (mloc));

	  if (val && !cselib_preserved_value_p (val))
	    preserve_value (val);
	}

      if (GET_CODE (expr) == CLOBBER || !track_p)
	{
	  mo.type = MO_CLOBBER;
	  mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
	}
      else
	{
	  if (GET_CODE (expr) == SET
	      && SET_DEST (expr) == loc
	      && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
	    src = var_lowpart (mode2, SET_SRC (expr));
	  loc = var_lowpart (mode2, loc);

	  if (src == NULL)
	    {
	      mo.type = MO_SET;
	      mo.u.loc = loc;
	    }
	  else
	    {
	      rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
	      if (same_variable_part_p (SET_SRC (xexpr),
					MEM_EXPR (loc),
					INT_MEM_OFFSET (loc)))
		mo.type = MO_COPY;
	      else
		mo.type = MO_SET;
	      mo.u.loc = xexpr;
	    }
	}
      mo.insn = cui->insn;
    }
  else
    return;

  if (type != MO_VAL_SET)
    goto log_and_return;

  v = find_use_val (oloc, mode, cui);

  if (!v)
    goto log_and_return;

  resolve = preserve = !cselib_preserved_value_p (v);

  nloc = replace_expr_with_values (oloc);
  if (nloc)
    oloc = nloc;

  if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
    {
      /* For a conditional store the destination's old value may
	 survive; emit an extra MO_VAL_USE for it and don't attempt
	 resolution.  */
      cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);

      gcc_assert (oval != v);
      gcc_assert (REG_P (oloc) || MEM_P (oloc));

      if (oval && !cselib_preserved_value_p (oval))
	{
	  micro_operation moa;

	  preserve_value (oval);

	  moa.type = MO_VAL_USE;
	  moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
	  VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
	  moa.insn = cui->insn;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    log_op_type (moa.u.loc, cui->bb, cui->insn,
			 moa.type, dump_file);
	  VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
	}

      resolve = false;
    }
  else if (resolve && GET_CODE (mo.u.loc) == SET)
    {
      if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
	nloc = replace_expr_with_values (SET_SRC (expr));
      else
	nloc = NULL_RTX;

      /* Avoid the mode mismatch between oexpr and expr.  */
      if (!nloc && mode != mode2)
	{
	  nloc = SET_SRC (expr);
	  gcc_assert (oloc == SET_DEST (expr));
	}

      if (nloc && nloc != SET_SRC (mo.u.loc))
	oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
      else
	{
	  if (oloc == SET_DEST (mo.u.loc))
	    /* No point in duplicating.  */
	    oloc = mo.u.loc;
	  if (!REG_P (SET_SRC (mo.u.loc)))
	    resolve = false;
	}
    }
  else if (!resolve)
    {
      if (GET_CODE (mo.u.loc) == SET
	  && oloc == SET_DEST (mo.u.loc))
	/* No point in duplicating.  */
	oloc = mo.u.loc;
    }
  else
    resolve = false;

  loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);

  if (mo.u.loc != oloc)
    loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);

  /* The loc of a MO_VAL_SET may have various forms:

     (concat val dst): dst now holds val

     (concat val (set dst src)): dst now holds val, copied from src

     (concat (concat val dstv) dst): dst now holds val; dstv is dst
     after replacing mems and non-top-level regs with values.

     (concat (concat val dstv) (set dst src)): dst now holds val,
     copied from src.  dstv is a value-based representation of dst, if
     it differs from dst.  If resolution is needed, src is a REG, and
     its mode is the same as that of val.

     (concat (concat val (set dstv srcv)) (set dst src)): src
     copied to dst, holding val.  dstv and srcv are value-based
     representations of dst and src, respectively.

  */

  if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
    reverse_op (v->val_rtx, expr, cui->insn);

  mo.u.loc = loc;

  if (track_p)
    VAL_HOLDS_TRACK_EXPR (loc) = 1;
  if (preserve)
    {
      VAL_NEEDS_RESOLUTION (loc) = resolve;
      preserve_value (v);
    }
  if (mo.type == MO_CLOBBER)
    VAL_EXPR_IS_CLOBBERED (loc) = 1;
  if (mo.type == MO_COPY)
    VAL_EXPR_IS_COPIED (loc) = 1;

  mo.type = MO_VAL_SET;

 log_and_return:
  if (dump_file && (dump_flags & TDF_DETAILS))
    log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
  VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
}
|
|
|
/* Arguments to the call.  Filled in by prepare_call_arguments for the
   current CALL_INSN.  */
static rtx call_arguments;
|
|
|
/* Compute call_arguments. */
|
/* Compute call_arguments. */
|
|
|
static void
|
static void
|
prepare_call_arguments (basic_block bb, rtx insn)
|
prepare_call_arguments (basic_block bb, rtx insn)
|
{
|
{
|
rtx link, x;
|
rtx link, x;
|
rtx prev, cur, next;
|
rtx prev, cur, next;
|
rtx call = PATTERN (insn);
|
rtx call = PATTERN (insn);
|
rtx this_arg = NULL_RTX;
|
rtx this_arg = NULL_RTX;
|
tree type = NULL_TREE, t, fndecl = NULL_TREE;
|
tree type = NULL_TREE, t, fndecl = NULL_TREE;
|
tree obj_type_ref = NULL_TREE;
|
tree obj_type_ref = NULL_TREE;
|
CUMULATIVE_ARGS args_so_far_v;
|
CUMULATIVE_ARGS args_so_far_v;
|
cumulative_args_t args_so_far;
|
cumulative_args_t args_so_far;
|
|
|
memset (&args_so_far_v, 0, sizeof (args_so_far_v));
|
memset (&args_so_far_v, 0, sizeof (args_so_far_v));
|
args_so_far = pack_cumulative_args (&args_so_far_v);
|
args_so_far = pack_cumulative_args (&args_so_far_v);
|
if (GET_CODE (call) == PARALLEL)
|
if (GET_CODE (call) == PARALLEL)
|
call = XVECEXP (call, 0, 0);
|
call = XVECEXP (call, 0, 0);
|
if (GET_CODE (call) == SET)
|
if (GET_CODE (call) == SET)
|
call = SET_SRC (call);
|
call = SET_SRC (call);
|
if (GET_CODE (call) == CALL && MEM_P (XEXP (call, 0)))
|
if (GET_CODE (call) == CALL && MEM_P (XEXP (call, 0)))
|
{
|
{
|
if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
|
if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
|
{
|
{
|
rtx symbol = XEXP (XEXP (call, 0), 0);
|
rtx symbol = XEXP (XEXP (call, 0), 0);
|
if (SYMBOL_REF_DECL (symbol))
|
if (SYMBOL_REF_DECL (symbol))
|
fndecl = SYMBOL_REF_DECL (symbol);
|
fndecl = SYMBOL_REF_DECL (symbol);
|
}
|
}
|
if (fndecl == NULL_TREE)
|
if (fndecl == NULL_TREE)
|
fndecl = MEM_EXPR (XEXP (call, 0));
|
fndecl = MEM_EXPR (XEXP (call, 0));
|
if (fndecl
|
if (fndecl
|
&& TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
|
&& TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
|
&& TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
|
&& TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
|
fndecl = NULL_TREE;
|
fndecl = NULL_TREE;
|
if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
|
if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
|
type = TREE_TYPE (fndecl);
|
type = TREE_TYPE (fndecl);
|
if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
|
if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
|
{
|
{
|
if (TREE_CODE (fndecl) == INDIRECT_REF
|
if (TREE_CODE (fndecl) == INDIRECT_REF
|
&& TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
|
&& TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
|
obj_type_ref = TREE_OPERAND (fndecl, 0);
|
obj_type_ref = TREE_OPERAND (fndecl, 0);
|
fndecl = NULL_TREE;
|
fndecl = NULL_TREE;
|
}
|
}
|
if (type)
|
if (type)
|
{
|
{
|
for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
|
for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
|
t = TREE_CHAIN (t))
|
t = TREE_CHAIN (t))
|
if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
|
if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
|
&& INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
|
&& INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
|
break;
|
break;
|
if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
|
if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
|
type = NULL;
|
type = NULL;
|
else
|
else
|
{
|
{
|
int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
|
int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
|
link = CALL_INSN_FUNCTION_USAGE (insn);
|
link = CALL_INSN_FUNCTION_USAGE (insn);
|
#ifndef PCC_STATIC_STRUCT_RETURN
|
#ifndef PCC_STATIC_STRUCT_RETURN
|
if (aggregate_value_p (TREE_TYPE (type), type)
|
if (aggregate_value_p (TREE_TYPE (type), type)
|
&& targetm.calls.struct_value_rtx (type, 0) == 0)
|
&& targetm.calls.struct_value_rtx (type, 0) == 0)
|
{
|
{
|
tree struct_addr = build_pointer_type (TREE_TYPE (type));
|
tree struct_addr = build_pointer_type (TREE_TYPE (type));
|
enum machine_mode mode = TYPE_MODE (struct_addr);
|
enum machine_mode mode = TYPE_MODE (struct_addr);
|
rtx reg;
|
rtx reg;
|
INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
|
INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
|
nargs + 1);
|
nargs + 1);
|
reg = targetm.calls.function_arg (args_so_far, mode,
|
reg = targetm.calls.function_arg (args_so_far, mode,
|
struct_addr, true);
|
struct_addr, true);
|
targetm.calls.function_arg_advance (args_so_far, mode,
|
targetm.calls.function_arg_advance (args_so_far, mode,
|
struct_addr, true);
|
struct_addr, true);
|
if (reg == NULL_RTX)
|
if (reg == NULL_RTX)
|
{
|
{
|
for (; link; link = XEXP (link, 1))
|
for (; link; link = XEXP (link, 1))
|
if (GET_CODE (XEXP (link, 0)) == USE
|
if (GET_CODE (XEXP (link, 0)) == USE
|
&& MEM_P (XEXP (XEXP (link, 0), 0)))
|
&& MEM_P (XEXP (XEXP (link, 0), 0)))
|
{
|
{
|
link = XEXP (link, 1);
|
link = XEXP (link, 1);
|
break;
|
break;
|
}
|
}
|
}
|
}
|
}
|
}
|
else
|
else
|
#endif
|
#endif
|
INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
|
INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
|
nargs);
|
nargs);
|
if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
|
if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
|
{
|
{
|
enum machine_mode mode;
|
enum machine_mode mode;
|
t = TYPE_ARG_TYPES (type);
|
t = TYPE_ARG_TYPES (type);
|
mode = TYPE_MODE (TREE_VALUE (t));
|
mode = TYPE_MODE (TREE_VALUE (t));
|
this_arg = targetm.calls.function_arg (args_so_far, mode,
|
this_arg = targetm.calls.function_arg (args_so_far, mode,
|
TREE_VALUE (t), true);
|
TREE_VALUE (t), true);
|
if (this_arg && !REG_P (this_arg))
|
if (this_arg && !REG_P (this_arg))
|
this_arg = NULL_RTX;
|
this_arg = NULL_RTX;
|
else if (this_arg == NULL_RTX)
|
else if (this_arg == NULL_RTX)
|
{
|
{
|
for (; link; link = XEXP (link, 1))
|
for (; link; link = XEXP (link, 1))
|
if (GET_CODE (XEXP (link, 0)) == USE
|
if (GET_CODE (XEXP (link, 0)) == USE
|
&& MEM_P (XEXP (XEXP (link, 0), 0)))
|
&& MEM_P (XEXP (XEXP (link, 0), 0)))
|
{
|
{
|
this_arg = XEXP (XEXP (link, 0), 0);
|
this_arg = XEXP (XEXP (link, 0), 0);
|
break;
|
break;
|
}
|
}
|
}
|
}
|
}
|
}
|
}
|
}
|
}
|
}
|
}
|
}
|
t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
|
t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
|
|
|
for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
|
for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
|
if (GET_CODE (XEXP (link, 0)) == USE)
|
if (GET_CODE (XEXP (link, 0)) == USE)
|
{
|
{
|
rtx item = NULL_RTX;
|
rtx item = NULL_RTX;
|
x = XEXP (XEXP (link, 0), 0);
|
x = XEXP (XEXP (link, 0), 0);
|
if (GET_MODE (link) == VOIDmode
|
if (GET_MODE (link) == VOIDmode
|
|| GET_MODE (link) == BLKmode
|
|| GET_MODE (link) == BLKmode
|
|| (GET_MODE (link) != GET_MODE (x)
|
|| (GET_MODE (link) != GET_MODE (x)
|
&& (GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
|
&& (GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
|
|| GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)))
|
|| GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)))
|
/* Can't do anything for these, if the original type mode
|
/* Can't do anything for these, if the original type mode
|
isn't known or can't be converted. */;
|
isn't known or can't be converted. */;
|
else if (REG_P (x))
|
else if (REG_P (x))
|
{
|
{
|
cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
|
cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
|
if (val && cselib_preserved_value_p (val))
|
if (val && cselib_preserved_value_p (val))
|
item = val->val_rtx;
|
item = val->val_rtx;
|
else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
|
else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
|
{
|
{
|
enum machine_mode mode = GET_MODE (x);
|
enum machine_mode mode = GET_MODE (x);
|
|
|
while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
|
while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
|
&& GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
|
&& GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
|
{
|
{
|
rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
|
rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
|
|
|
if (reg == NULL_RTX || !REG_P (reg))
|
if (reg == NULL_RTX || !REG_P (reg))
|
continue;
|
continue;
|
val = cselib_lookup (reg, mode, 0, VOIDmode);
|
val = cselib_lookup (reg, mode, 0, VOIDmode);
|
if (val && cselib_preserved_value_p (val))
|
if (val && cselib_preserved_value_p (val))
|
{
|
{
|
item = val->val_rtx;
|
item = val->val_rtx;
|
break;
|
break;
|
}
|
}
|
}
|
}
|
}
|
}
|
}
|
}
|
else if (MEM_P (x))
|
else if (MEM_P (x))
|
{
|
{
|
rtx mem = x;
|
rtx mem = x;
|
cselib_val *val;
|
cselib_val *val;
|
|
|
if (!frame_pointer_needed)
|
if (!frame_pointer_needed)
|
{
|
{
|
struct adjust_mem_data amd;
|
struct adjust_mem_data amd;
|
amd.mem_mode = VOIDmode;
|
amd.mem_mode = VOIDmode;
|
amd.stack_adjust = -VTI (bb)->out.stack_adjust;
|
amd.stack_adjust = -VTI (bb)->out.stack_adjust;
|
amd.side_effects = NULL_RTX;
|
amd.side_effects = NULL_RTX;
|
amd.store = true;
|
amd.store = true;
|
mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
|
mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
|
&amd);
|
&amd);
|
gcc_assert (amd.side_effects == NULL_RTX);
|
gcc_assert (amd.side_effects == NULL_RTX);
|
}
|
}
|
val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
|
val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
|
if (val && cselib_preserved_value_p (val))
|
if (val && cselib_preserved_value_p (val))
|
item = val->val_rtx;
|
item = val->val_rtx;
|
else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT)
|
else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT)
|
{
|
{
|
/* For non-integer stack argument see also if they weren't
|
/* For non-integer stack argument see also if they weren't
|
initialized by integers. */
|
initialized by integers. */
|
enum machine_mode imode = int_mode_for_mode (GET_MODE (mem));
|
enum machine_mode imode = int_mode_for_mode (GET_MODE (mem));
|
if (imode != GET_MODE (mem) && imode != BLKmode)
|
if (imode != GET_MODE (mem) && imode != BLKmode)
|
{
|
{
|
val = cselib_lookup (adjust_address_nv (mem, imode, 0),
|
val = cselib_lookup (adjust_address_nv (mem, imode, 0),
|
imode, 0, VOIDmode);
|
imode, 0, VOIDmode);
|
if (val && cselib_preserved_value_p (val))
|
if (val && cselib_preserved_value_p (val))
|
item = lowpart_subreg (GET_MODE (x), val->val_rtx,
|
item = lowpart_subreg (GET_MODE (x), val->val_rtx,
|
imode);
|
imode);
|
}
|
}
|
}
|
}
|
}
|
}
|
if (item)
|
if (item)
|
{
|
{
|
rtx x2 = x;
|
rtx x2 = x;
|
if (GET_MODE (item) != GET_MODE (link))
|
if (GET_MODE (item) != GET_MODE (link))
|
item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
|
item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
|
if (GET_MODE (x2) != GET_MODE (link))
|
if (GET_MODE (x2) != GET_MODE (link))
|
x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
|
x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
|
item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
|
item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
|
call_arguments
|
call_arguments
|
= gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
|
= gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
|
}
|
}
|
if (t && t != void_list_node)
|
if (t && t != void_list_node)
|
{
|
{
|
tree argtype = TREE_VALUE (t);
|
tree argtype = TREE_VALUE (t);
|
enum machine_mode mode = TYPE_MODE (argtype);
|
enum machine_mode mode = TYPE_MODE (argtype);
|
rtx reg;
|
rtx reg;
|
if (pass_by_reference (&args_so_far_v, mode, argtype, true))
|
if (pass_by_reference (&args_so_far_v, mode, argtype, true))
|
{
|
{
|
argtype = build_pointer_type (argtype);
|
argtype = build_pointer_type (argtype);
|
mode = TYPE_MODE (argtype);
|
mode = TYPE_MODE (argtype);
|
}
|
}
|
reg = targetm.calls.function_arg (args_so_far, mode,
|
reg = targetm.calls.function_arg (args_so_far, mode,
|
argtype, true);
|
argtype, true);
|
if (TREE_CODE (argtype) == REFERENCE_TYPE
|
if (TREE_CODE (argtype) == REFERENCE_TYPE
|
&& INTEGRAL_TYPE_P (TREE_TYPE (argtype))
|
&& INTEGRAL_TYPE_P (TREE_TYPE (argtype))
|
&& reg
|
&& reg
|
&& REG_P (reg)
|
&& REG_P (reg)
|
&& GET_MODE (reg) == mode
|
&& GET_MODE (reg) == mode
|
&& GET_MODE_CLASS (mode) == MODE_INT
|
&& GET_MODE_CLASS (mode) == MODE_INT
|
&& REG_P (x)
|
&& REG_P (x)
|
&& REGNO (x) == REGNO (reg)
|
&& REGNO (x) == REGNO (reg)
|
&& GET_MODE (x) == mode
|
&& GET_MODE (x) == mode
|
&& item)
|
&& item)
|
{
|
{
|
enum machine_mode indmode
|
enum machine_mode indmode
|
= TYPE_MODE (TREE_TYPE (argtype));
|
= TYPE_MODE (TREE_TYPE (argtype));
|
rtx mem = gen_rtx_MEM (indmode, x);
|
rtx mem = gen_rtx_MEM (indmode, x);
|
cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
|
cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
|
if (val && cselib_preserved_value_p (val))
|
if (val && cselib_preserved_value_p (val))
|
{
|
{
|
item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
|
item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
|
call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
|
call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
|
call_arguments);
|
call_arguments);
|
}
|
}
|
else
|
else
|
{
|
{
|
struct elt_loc_list *l;
|
struct elt_loc_list *l;
|
tree initial;
|
tree initial;
|
|
|
/* Try harder, when passing address of a constant
|
/* Try harder, when passing address of a constant
|
pool integer it can be easily read back. */
|
pool integer it can be easily read back. */
|
item = XEXP (item, 1);
|
item = XEXP (item, 1);
|
if (GET_CODE (item) == SUBREG)
|
if (GET_CODE (item) == SUBREG)
|
item = SUBREG_REG (item);
|
item = SUBREG_REG (item);
|
gcc_assert (GET_CODE (item) == VALUE);
|
gcc_assert (GET_CODE (item) == VALUE);
|
val = CSELIB_VAL_PTR (item);
|
val = CSELIB_VAL_PTR (item);
|
for (l = val->locs; l; l = l->next)
|
for (l = val->locs; l; l = l->next)
|
if (GET_CODE (l->loc) == SYMBOL_REF
|
if (GET_CODE (l->loc) == SYMBOL_REF
|
&& TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
|
&& TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
|
&& SYMBOL_REF_DECL (l->loc)
|
&& SYMBOL_REF_DECL (l->loc)
|
&& DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
|
&& DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
|
{
|
{
|
initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
|
initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
|
if (host_integerp (initial, 0))
|
if (host_integerp (initial, 0))
|
{
|
{
|
item = GEN_INT (tree_low_cst (initial, 0));
|
item = GEN_INT (tree_low_cst (initial, 0));
|
item = gen_rtx_CONCAT (indmode, mem, item);
|
item = gen_rtx_CONCAT (indmode, mem, item);
|
call_arguments
|
call_arguments
|
= gen_rtx_EXPR_LIST (VOIDmode, item,
|
= gen_rtx_EXPR_LIST (VOIDmode, item,
|
call_arguments);
|
call_arguments);
|
}
|
}
|
break;
|
break;
|
}
|
}
|
}
|
}
|
}
|
}
|
targetm.calls.function_arg_advance (args_so_far, mode,
|
targetm.calls.function_arg_advance (args_so_far, mode,
|
argtype, true);
|
argtype, true);
|
t = TREE_CHAIN (t);
|
t = TREE_CHAIN (t);
|
}
|
}
|
}
|
}
|
|
|
/* Add debug arguments. */
|
/* Add debug arguments. */
|
if (fndecl
|
if (fndecl
|
&& TREE_CODE (fndecl) == FUNCTION_DECL
|
&& TREE_CODE (fndecl) == FUNCTION_DECL
|
&& DECL_HAS_DEBUG_ARGS_P (fndecl))
|
&& DECL_HAS_DEBUG_ARGS_P (fndecl))
|
{
|
{
|
VEC(tree, gc) **debug_args = decl_debug_args_lookup (fndecl);
|
VEC(tree, gc) **debug_args = decl_debug_args_lookup (fndecl);
|
if (debug_args)
|
if (debug_args)
|
{
|
{
|
unsigned int ix;
|
unsigned int ix;
|
tree param;
|
tree param;
|
for (ix = 0; VEC_iterate (tree, *debug_args, ix, param); ix += 2)
|
for (ix = 0; VEC_iterate (tree, *debug_args, ix, param); ix += 2)
|
{
|
{
|
rtx item;
|
rtx item;
|
tree dtemp = VEC_index (tree, *debug_args, ix + 1);
|
tree dtemp = VEC_index (tree, *debug_args, ix + 1);
|
enum machine_mode mode = DECL_MODE (dtemp);
|
enum machine_mode mode = DECL_MODE (dtemp);
|
item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
|
item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
|
item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
|
item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
|
call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
|
call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
|
call_arguments);
|
call_arguments);
|
}
|
}
|
}
|
}
|
}
|
}
|
|
|
/* Reverse call_arguments chain. */
|
/* Reverse call_arguments chain. */
|
prev = NULL_RTX;
|
prev = NULL_RTX;
|
for (cur = call_arguments; cur; cur = next)
|
for (cur = call_arguments; cur; cur = next)
|
{
|
{
|
next = XEXP (cur, 1);
|
next = XEXP (cur, 1);
|
XEXP (cur, 1) = prev;
|
XEXP (cur, 1) = prev;
|
prev = cur;
|
prev = cur;
|
}
|
}
|
call_arguments = prev;
|
call_arguments = prev;
|
|
|
x = PATTERN (insn);
|
x = PATTERN (insn);
|
if (GET_CODE (x) == PARALLEL)
|
if (GET_CODE (x) == PARALLEL)
|
x = XVECEXP (x, 0, 0);
|
x = XVECEXP (x, 0, 0);
|
if (GET_CODE (x) == SET)
|
if (GET_CODE (x) == SET)
|
x = SET_SRC (x);
|
x = SET_SRC (x);
|
if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
|
if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
|
{
|
{
|
x = XEXP (XEXP (x, 0), 0);
|
x = XEXP (XEXP (x, 0), 0);
|
if (GET_CODE (x) == SYMBOL_REF)
|
if (GET_CODE (x) == SYMBOL_REF)
|
/* Don't record anything. */;
|
/* Don't record anything. */;
|
else if (CONSTANT_P (x))
|
else if (CONSTANT_P (x))
|
{
|
{
|
x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
|
x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
|
pc_rtx, x);
|
pc_rtx, x);
|
call_arguments
|
call_arguments
|
= gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
|
= gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
|
}
|
}
|
else
|
else
|
{
|
{
|
cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
|
cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
|
if (val && cselib_preserved_value_p (val))
|
if (val && cselib_preserved_value_p (val))
|
{
|
{
|
x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
|
x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
|
call_arguments
|
call_arguments
|
= gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
|
= gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
|
}
|
}
|
}
|
}
|
}
|
}
|
if (this_arg)
|
if (this_arg)
|
{
|
{
|
enum machine_mode mode
|
enum machine_mode mode
|
= TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
|
= TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
|
rtx clobbered = gen_rtx_MEM (mode, this_arg);
|
rtx clobbered = gen_rtx_MEM (mode, this_arg);
|
HOST_WIDE_INT token
|
HOST_WIDE_INT token
|
= tree_low_cst (OBJ_TYPE_REF_TOKEN (obj_type_ref), 0);
|
= tree_low_cst (OBJ_TYPE_REF_TOKEN (obj_type_ref), 0);
|
if (token)
|
if (token)
|
clobbered = plus_constant (clobbered, token * GET_MODE_SIZE (mode));
|
clobbered = plus_constant (clobbered, token * GET_MODE_SIZE (mode));
|
clobbered = gen_rtx_MEM (mode, clobbered);
|
clobbered = gen_rtx_MEM (mode, clobbered);
|
x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
|
x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
|
call_arguments
|
call_arguments
|
= gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
|
= gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
|
}
|
}
|
}
|
}
|
|
|
/* Callback for cselib_record_sets_hook, that records as micro
|
/* Callback for cselib_record_sets_hook, that records as micro
|
operations uses and stores in an insn after cselib_record_sets has
|
operations uses and stores in an insn after cselib_record_sets has
|
analyzed the sets in an insn, but before it modifies the stored
|
analyzed the sets in an insn, but before it modifies the stored
|
values in the internal tables, unless cselib_record_sets doesn't
|
values in the internal tables, unless cselib_record_sets doesn't
|
call it directly (perhaps because we're not doing cselib in the
|
call it directly (perhaps because we're not doing cselib in the
|
first place, in which case sets and n_sets will be 0). */
|
first place, in which case sets and n_sets will be 0). */
|
|
|
static void
|
static void
|
add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
|
add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
|
{
|
{
|
basic_block bb = BLOCK_FOR_INSN (insn);
|
basic_block bb = BLOCK_FOR_INSN (insn);
|
int n1, n2;
|
int n1, n2;
|
struct count_use_info cui;
|
struct count_use_info cui;
|
micro_operation *mos;
|
micro_operation *mos;
|
|
|
cselib_hook_called = true;
|
cselib_hook_called = true;
|
|
|
cui.insn = insn;
|
cui.insn = insn;
|
cui.bb = bb;
|
cui.bb = bb;
|
cui.sets = sets;
|
cui.sets = sets;
|
cui.n_sets = n_sets;
|
cui.n_sets = n_sets;
|
|
|
n1 = VEC_length (micro_operation, VTI (bb)->mos);
|
n1 = VEC_length (micro_operation, VTI (bb)->mos);
|
cui.store_p = false;
|
cui.store_p = false;
|
note_uses (&PATTERN (insn), add_uses_1, &cui);
|
note_uses (&PATTERN (insn), add_uses_1, &cui);
|
n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
|
n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
|
mos = VEC_address (micro_operation, VTI (bb)->mos);
|
mos = VEC_address (micro_operation, VTI (bb)->mos);
|
|
|
/* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
|
/* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
|
MO_VAL_LOC last. */
|
MO_VAL_LOC last. */
|
while (n1 < n2)
|
while (n1 < n2)
|
{
|
{
|
while (n1 < n2 && mos[n1].type == MO_USE)
|
while (n1 < n2 && mos[n1].type == MO_USE)
|
n1++;
|
n1++;
|
while (n1 < n2 && mos[n2].type != MO_USE)
|
while (n1 < n2 && mos[n2].type != MO_USE)
|
n2--;
|
n2--;
|
if (n1 < n2)
|
if (n1 < n2)
|
{
|
{
|
micro_operation sw;
|
micro_operation sw;
|
|
|
sw = mos[n1];
|
sw = mos[n1];
|
mos[n1] = mos[n2];
|
mos[n1] = mos[n2];
|
mos[n2] = sw;
|
mos[n2] = sw;
|
}
|
}
|
}
|
}
|
|
|
n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
|
n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
|
while (n1 < n2)
|
while (n1 < n2)
|
{
|
{
|
while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
|
while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
|
n1++;
|
n1++;
|
while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
|
while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
|
n2--;
|
n2--;
|
if (n1 < n2)
|
if (n1 < n2)
|
{
|
{
|
micro_operation sw;
|
micro_operation sw;
|
|
|
sw = mos[n1];
|
sw = mos[n1];
|
mos[n1] = mos[n2];
|
mos[n1] = mos[n2];
|
mos[n2] = sw;
|
mos[n2] = sw;
|
}
|
}
|
}
|
}
|
|
|
if (CALL_P (insn))
|
if (CALL_P (insn))
|
{
|
{
|
micro_operation mo;
|
micro_operation mo;
|
|
|
mo.type = MO_CALL;
|
mo.type = MO_CALL;
|
mo.insn = insn;
|
mo.insn = insn;
|
mo.u.loc = call_arguments;
|
mo.u.loc = call_arguments;
|
call_arguments = NULL_RTX;
|
call_arguments = NULL_RTX;
|
|
|
if (dump_file && (dump_flags & TDF_DETAILS))
|
if (dump_file && (dump_flags & TDF_DETAILS))
|
log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
|
log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
|
VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
|
VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
|
}
|
}
|
|
|
n1 = VEC_length (micro_operation, VTI (bb)->mos);
|
n1 = VEC_length (micro_operation, VTI (bb)->mos);
|
/* This will record NEXT_INSN (insn), such that we can
|
/* This will record NEXT_INSN (insn), such that we can
|
insert notes before it without worrying about any
|
insert notes before it without worrying about any
|
notes that MO_USEs might emit after the insn. */
|
notes that MO_USEs might emit after the insn. */
|
cui.store_p = true;
|
cui.store_p = true;
|
note_stores (PATTERN (insn), add_stores, &cui);
|
note_stores (PATTERN (insn), add_stores, &cui);
|
n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
|
n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
|
mos = VEC_address (micro_operation, VTI (bb)->mos);
|
mos = VEC_address (micro_operation, VTI (bb)->mos);
|
|
|
/* Order the MO_VAL_USEs first (note_stores does nothing
|
/* Order the MO_VAL_USEs first (note_stores does nothing
|
on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
|
on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
|
insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
|
insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
|
while (n1 < n2)
|
while (n1 < n2)
|
{
|
{
|
while (n1 < n2 && mos[n1].type == MO_VAL_USE)
|
while (n1 < n2 && mos[n1].type == MO_VAL_USE)
|
n1++;
|
n1++;
|
while (n1 < n2 && mos[n2].type != MO_VAL_USE)
|
while (n1 < n2 && mos[n2].type != MO_VAL_USE)
|
n2--;
|
n2--;
|
if (n1 < n2)
|
if (n1 < n2)
|
{
|
{
|
micro_operation sw;
|
micro_operation sw;
|
|
|
sw = mos[n1];
|
sw = mos[n1];
|
mos[n1] = mos[n2];
|
mos[n1] = mos[n2];
|
mos[n2] = sw;
|
mos[n2] = sw;
|
}
|
}
|
}
|
}
|
|
|
n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
|
n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
|
while (n1 < n2)
|
while (n1 < n2)
|
{
|
{
|
while (n1 < n2 && mos[n1].type == MO_CLOBBER)
|
while (n1 < n2 && mos[n1].type == MO_CLOBBER)
|
n1++;
|
n1++;
|
while (n1 < n2 && mos[n2].type != MO_CLOBBER)
|
while (n1 < n2 && mos[n2].type != MO_CLOBBER)
|
n2--;
|
n2--;
|
if (n1 < n2)
|
if (n1 < n2)
|
{
|
{
|
micro_operation sw;
|
micro_operation sw;
|
|
|
sw = mos[n1];
|
sw = mos[n1];
|
mos[n1] = mos[n2];
|
mos[n1] = mos[n2];
|
mos[n2] = sw;
|
mos[n2] = sw;
|
}
|
}
|
}
|
}
|
}
|
}
|
|
|
static enum var_init_status
|
static enum var_init_status
|
find_src_status (dataflow_set *in, rtx src)
|
find_src_status (dataflow_set *in, rtx src)
|
{
|
{
|
tree decl = NULL_TREE;
|
tree decl = NULL_TREE;
|
enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
|
enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
|
|
|
if (! flag_var_tracking_uninit)
|
if (! flag_var_tracking_uninit)
|
status = VAR_INIT_STATUS_INITIALIZED;
|
status = VAR_INIT_STATUS_INITIALIZED;
|
|
|
if (src && REG_P (src))
|
if (src && REG_P (src))
|
decl = var_debug_decl (REG_EXPR (src));
|
decl = var_debug_decl (REG_EXPR (src));
|
else if (src && MEM_P (src))
|
else if (src && MEM_P (src))
|
decl = var_debug_decl (MEM_EXPR (src));
|
decl = var_debug_decl (MEM_EXPR (src));
|
|
|
if (src && decl)
|
if (src && decl)
|
status = get_init_value (in, src, dv_from_decl (decl));
|
status = get_init_value (in, src, dv_from_decl (decl));
|
|
|
return status;
|
return status;
|
}
|
}
|
|
|
/* SRC is the source of an assignment. Use SET to try to find what
|
/* SRC is the source of an assignment. Use SET to try to find what
|
was ultimately assigned to SRC. Return that value if known,
|
was ultimately assigned to SRC. Return that value if known,
|
otherwise return SRC itself. */
|
otherwise return SRC itself. */
|
|
|
static rtx
|
static rtx
|
find_src_set_src (dataflow_set *set, rtx src)
|
find_src_set_src (dataflow_set *set, rtx src)
|
{
|
{
|
tree decl = NULL_TREE; /* The variable being copied around. */
|
tree decl = NULL_TREE; /* The variable being copied around. */
|
rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
|
rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
|
variable var;
|
variable var;
|
location_chain nextp;
|
location_chain nextp;
|
int i;
|
int i;
|
bool found;
|
bool found;
|
|
|
if (src && REG_P (src))
|
if (src && REG_P (src))
|
decl = var_debug_decl (REG_EXPR (src));
|
decl = var_debug_decl (REG_EXPR (src));
|
else if (src && MEM_P (src))
|
else if (src && MEM_P (src))
|
decl = var_debug_decl (MEM_EXPR (src));
|
decl = var_debug_decl (MEM_EXPR (src));
|
|
|
if (src && decl)
|
if (src && decl)
|
{
|
{
|
decl_or_value dv = dv_from_decl (decl);
|
decl_or_value dv = dv_from_decl (decl);
|
|
|
var = shared_hash_find (set->vars, dv);
|
var = shared_hash_find (set->vars, dv);
|
if (var)
|
if (var)
|
{
|
{
|
found = false;
|
found = false;
|
for (i = 0; i < var->n_var_parts && !found; i++)
|
for (i = 0; i < var->n_var_parts && !found; i++)
|
for (nextp = var->var_part[i].loc_chain; nextp && !found;
|
for (nextp = var->var_part[i].loc_chain; nextp && !found;
|
nextp = nextp->next)
|
nextp = nextp->next)
|
if (rtx_equal_p (nextp->loc, src))
|
if (rtx_equal_p (nextp->loc, src))
|
{
|
{
|
set_src = nextp->set_src;
|
set_src = nextp->set_src;
|
found = true;
|
found = true;
|
}
|
}
|
|
|
}
|
}
|
}
|
}
|
|
|
return set_src;
|
return set_src;
|
}
|
}
|
|
|
/* Compute the changes of variable locations in the basic block BB. */
|
/* Compute the changes of variable locations in the basic block BB. */
|
|
|
static bool
|
static bool
|
compute_bb_dataflow (basic_block bb)
|
compute_bb_dataflow (basic_block bb)
|
{
|
{
|
unsigned int i;
|
unsigned int i;
|
micro_operation *mo;
|
micro_operation *mo;
|
bool changed;
|
bool changed;
|
dataflow_set old_out;
|
dataflow_set old_out;
|
dataflow_set *in = &VTI (bb)->in;
|
dataflow_set *in = &VTI (bb)->in;
|
dataflow_set *out = &VTI (bb)->out;
|
dataflow_set *out = &VTI (bb)->out;
|
|
|
dataflow_set_init (&old_out);
|
dataflow_set_init (&old_out);
|
dataflow_set_copy (&old_out, out);
|
dataflow_set_copy (&old_out, out);
|
dataflow_set_copy (out, in);
|
dataflow_set_copy (out, in);
|
|
|
FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
|
FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
|
{
|
{
|
rtx insn = mo->insn;
|
rtx insn = mo->insn;
|
|
|
switch (mo->type)
|
switch (mo->type)
|
{
|
{
|
case MO_CALL:
|
case MO_CALL:
|
dataflow_set_clear_at_call (out);
|
dataflow_set_clear_at_call (out);
|
break;
|
break;
|
|
|
case MO_USE:
|
case MO_USE:
|
{
|
{
|
rtx loc = mo->u.loc;
|
rtx loc = mo->u.loc;
|
|
|
if (REG_P (loc))
|
if (REG_P (loc))
|
var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
|
var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
|
else if (MEM_P (loc))
|
else if (MEM_P (loc))
|
var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
|
var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
|
}
|
}
|
break;
|
break;
|
|
|
case MO_VAL_LOC:
|
case MO_VAL_LOC:
|
{
|
{
|
rtx loc = mo->u.loc;
|
rtx loc = mo->u.loc;
|
rtx val, vloc;
|
rtx val, vloc;
|
tree var;
|
tree var;
|
|
|
if (GET_CODE (loc) == CONCAT)
|
if (GET_CODE (loc) == CONCAT)
|
{
|
{
|
val = XEXP (loc, 0);
|
val = XEXP (loc, 0);
|
vloc = XEXP (loc, 1);
|
vloc = XEXP (loc, 1);
|
}
|
}
|
else
|
else
|
{
|
{
|
val = NULL_RTX;
|
val = NULL_RTX;
|
vloc = loc;
|
vloc = loc;
|
}
|
}
|
|
|
var = PAT_VAR_LOCATION_DECL (vloc);
|
var = PAT_VAR_LOCATION_DECL (vloc);
|
|
|
clobber_variable_part (out, NULL_RTX,
|
clobber_variable_part (out, NULL_RTX,
|
dv_from_decl (var), 0, NULL_RTX);
|
dv_from_decl (var), 0, NULL_RTX);
|
if (val)
|
if (val)
|
{
|
{
|
if (VAL_NEEDS_RESOLUTION (loc))
|
if (VAL_NEEDS_RESOLUTION (loc))
|
val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
|
val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
|
set_variable_part (out, val, dv_from_decl (var), 0,
|
set_variable_part (out, val, dv_from_decl (var), 0,
|
VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
|
VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
|
INSERT);
|
INSERT);
|
}
|
}
|
else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
|
else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
|
set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
|
set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
|
dv_from_decl (var), 0,
|
dv_from_decl (var), 0,
|
VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
|
VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
|
INSERT);
|
INSERT);
|
}
|
}
|
break;
|
break;
|
|
|
case MO_VAL_USE:
|
case MO_VAL_USE:
|
{
|
{
|
rtx loc = mo->u.loc;
|
rtx loc = mo->u.loc;
|
rtx val, vloc, uloc;
|
rtx val, vloc, uloc;
|
|
|
vloc = uloc = XEXP (loc, 1);
|
vloc = uloc = XEXP (loc, 1);
|
val = XEXP (loc, 0);
|
val = XEXP (loc, 0);
|
|
|
if (GET_CODE (val) == CONCAT)
|
if (GET_CODE (val) == CONCAT)
|
{
|
{
|
uloc = XEXP (val, 1);
|
uloc = XEXP (val, 1);
|
val = XEXP (val, 0);
|
val = XEXP (val, 0);
|
}
|
}
|
|
|
if (VAL_NEEDS_RESOLUTION (loc))
|
if (VAL_NEEDS_RESOLUTION (loc))
|
val_resolve (out, val, vloc, insn);
|
val_resolve (out, val, vloc, insn);
|
else
|
else
|
val_store (out, val, uloc, insn, false);
|
val_store (out, val, uloc, insn, false);
|
|
|
if (VAL_HOLDS_TRACK_EXPR (loc))
|
if (VAL_HOLDS_TRACK_EXPR (loc))
|
{
|
{
|
if (GET_CODE (uloc) == REG)
|
if (GET_CODE (uloc) == REG)
|
var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
|
var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
|
NULL);
|
NULL);
|
else if (GET_CODE (uloc) == MEM)
|
else if (GET_CODE (uloc) == MEM)
|
var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
|
var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
|
NULL);
|
NULL);
|
}
|
}
|
}
|
}
|
break;
|
break;
|
|
|
case MO_VAL_SET:
|
case MO_VAL_SET:
|
{
|
{
|
rtx loc = mo->u.loc;
|
rtx loc = mo->u.loc;
|
rtx val, vloc, uloc;
|
rtx val, vloc, uloc;
|
|
|
vloc = loc;
|
vloc = loc;
|
uloc = XEXP (vloc, 1);
|
uloc = XEXP (vloc, 1);
|
val = XEXP (vloc, 0);
|
val = XEXP (vloc, 0);
|
vloc = uloc;
|
vloc = uloc;
|
|
|
if (GET_CODE (val) == CONCAT)
|
if (GET_CODE (val) == CONCAT)
|
{
|
{
|
vloc = XEXP (val, 1);
|
vloc = XEXP (val, 1);
|
val = XEXP (val, 0);
|
val = XEXP (val, 0);
|
}
|
}
|
|
|
if (GET_CODE (vloc) == SET)
|
if (GET_CODE (vloc) == SET)
|
{
|
{
|
rtx vsrc = SET_SRC (vloc);
|
rtx vsrc = SET_SRC (vloc);
|
|
|
gcc_assert (val != vsrc);
|
gcc_assert (val != vsrc);
|
gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
|
gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
|
|
|
vloc = SET_DEST (vloc);
|
vloc = SET_DEST (vloc);
|
|
|
if (VAL_NEEDS_RESOLUTION (loc))
|
if (VAL_NEEDS_RESOLUTION (loc))
|
val_resolve (out, val, vsrc, insn);
|
val_resolve (out, val, vsrc, insn);
|
}
|
}
|
else if (VAL_NEEDS_RESOLUTION (loc))
|
else if (VAL_NEEDS_RESOLUTION (loc))
|
{
|
{
|
gcc_assert (GET_CODE (uloc) == SET
|
gcc_assert (GET_CODE (uloc) == SET
|
&& GET_CODE (SET_SRC (uloc)) == REG);
|
&& GET_CODE (SET_SRC (uloc)) == REG);
|
val_resolve (out, val, SET_SRC (uloc), insn);
|
val_resolve (out, val, SET_SRC (uloc), insn);
|
}
|
}
|
|
|
if (VAL_HOLDS_TRACK_EXPR (loc))
|
if (VAL_HOLDS_TRACK_EXPR (loc))
|
{
|
{
|
if (VAL_EXPR_IS_CLOBBERED (loc))
|
if (VAL_EXPR_IS_CLOBBERED (loc))
|
{
|
{
|
if (REG_P (uloc))
|
if (REG_P (uloc))
|
var_reg_delete (out, uloc, true);
|
var_reg_delete (out, uloc, true);
|
else if (MEM_P (uloc))
|
else if (MEM_P (uloc))
|
var_mem_delete (out, uloc, true);
|
var_mem_delete (out, uloc, true);
|
}
|
}
|
else
|
else
|
{
|
{
|
bool copied_p = VAL_EXPR_IS_COPIED (loc);
|
bool copied_p = VAL_EXPR_IS_COPIED (loc);
|
rtx set_src = NULL;
|
rtx set_src = NULL;
|
enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
|
enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
|
|
|
if (GET_CODE (uloc) == SET)
|
if (GET_CODE (uloc) == SET)
|
{
|
{
|
set_src = SET_SRC (uloc);
|
set_src = SET_SRC (uloc);
|
uloc = SET_DEST (uloc);
|
uloc = SET_DEST (uloc);
|
}
|
}
|
|
|
if (copied_p)
|
if (copied_p)
|
{
|
{
|
if (flag_var_tracking_uninit)
|
if (flag_var_tracking_uninit)
|
{
|
{
|
status = find_src_status (in, set_src);
|
status = find_src_status (in, set_src);
|
|
|
if (status == VAR_INIT_STATUS_UNKNOWN)
|
if (status == VAR_INIT_STATUS_UNKNOWN)
|
status = find_src_status (out, set_src);
|
status = find_src_status (out, set_src);
|
}
|
}
|
|
|
set_src = find_src_set_src (in, set_src);
|
set_src = find_src_set_src (in, set_src);
|
}
|
}
|
|
|
if (REG_P (uloc))
|
if (REG_P (uloc))
|
var_reg_delete_and_set (out, uloc, !copied_p,
|
var_reg_delete_and_set (out, uloc, !copied_p,
|
status, set_src);
|
status, set_src);
|
else if (MEM_P (uloc))
|
else if (MEM_P (uloc))
|
var_mem_delete_and_set (out, uloc, !copied_p,
|
var_mem_delete_and_set (out, uloc, !copied_p,
|
status, set_src);
|
status, set_src);
|
}
|
}
|
}
|
}
|
else if (REG_P (uloc))
|
else if (REG_P (uloc))
|
var_regno_delete (out, REGNO (uloc));
|
var_regno_delete (out, REGNO (uloc));
|
|
|
val_store (out, val, vloc, insn, true);
|
val_store (out, val, vloc, insn, true);
|
}
|
}
|
break;
|
break;
|
|
|
case MO_SET:
|
case MO_SET:
|
{
|
{
|
rtx loc = mo->u.loc;
|
rtx loc = mo->u.loc;
|
rtx set_src = NULL;
|
rtx set_src = NULL;
|
|
|
if (GET_CODE (loc) == SET)
|
if (GET_CODE (loc) == SET)
|
{
|
{
|
set_src = SET_SRC (loc);
|
set_src = SET_SRC (loc);
|
loc = SET_DEST (loc);
|
loc = SET_DEST (loc);
|
}
|
}
|
|
|
if (REG_P (loc))
|
if (REG_P (loc))
|
var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
|
var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
|
set_src);
|
set_src);
|
else if (MEM_P (loc))
|
else if (MEM_P (loc))
|
var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
|
var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
|
set_src);
|
set_src);
|
}
|
}
|
break;
|
break;
|
|
|
case MO_COPY:
|
case MO_COPY:
|
{
|
{
|
rtx loc = mo->u.loc;
|
rtx loc = mo->u.loc;
|
enum var_init_status src_status;
|
enum var_init_status src_status;
|
rtx set_src = NULL;
|
rtx set_src = NULL;
|
|
|
if (GET_CODE (loc) == SET)
|
if (GET_CODE (loc) == SET)
|
{
|
{
|
set_src = SET_SRC (loc);
|
set_src = SET_SRC (loc);
|
loc = SET_DEST (loc);
|
loc = SET_DEST (loc);
|
}
|
}
|
|
|
if (! flag_var_tracking_uninit)
|
if (! flag_var_tracking_uninit)
|
src_status = VAR_INIT_STATUS_INITIALIZED;
|
src_status = VAR_INIT_STATUS_INITIALIZED;
|
else
|
else
|
{
|
{
|
src_status = find_src_status (in, set_src);
|
src_status = find_src_status (in, set_src);
|
|
|
if (src_status == VAR_INIT_STATUS_UNKNOWN)
|
if (src_status == VAR_INIT_STATUS_UNKNOWN)
|
src_status = find_src_status (out, set_src);
|
src_status = find_src_status (out, set_src);
|
}
|
}
|
|
|
set_src = find_src_set_src (in, set_src);
|
set_src = find_src_set_src (in, set_src);
|
|
|
if (REG_P (loc))
|
if (REG_P (loc))
|
var_reg_delete_and_set (out, loc, false, src_status, set_src);
|
var_reg_delete_and_set (out, loc, false, src_status, set_src);
|
else if (MEM_P (loc))
|
else if (MEM_P (loc))
|
var_mem_delete_and_set (out, loc, false, src_status, set_src);
|
var_mem_delete_and_set (out, loc, false, src_status, set_src);
|
}
|
}
|
break;
|
break;
|
|
|
case MO_USE_NO_VAR:
|
case MO_USE_NO_VAR:
|
{
|
{
|
rtx loc = mo->u.loc;
|
rtx loc = mo->u.loc;
|
|
|
if (REG_P (loc))
|
if (REG_P (loc))
|
var_reg_delete (out, loc, false);
|
var_reg_delete (out, loc, false);
|
else if (MEM_P (loc))
|
else if (MEM_P (loc))
|
var_mem_delete (out, loc, false);
|
var_mem_delete (out, loc, false);
|
}
|
}
|
break;
|
break;
|
|
|
case MO_CLOBBER:
|
case MO_CLOBBER:
|
{
|
{
|
rtx loc = mo->u.loc;
|
rtx loc = mo->u.loc;
|
|
|
if (REG_P (loc))
|
if (REG_P (loc))
|
var_reg_delete (out, loc, true);
|
var_reg_delete (out, loc, true);
|
else if (MEM_P (loc))
|
else if (MEM_P (loc))
|
var_mem_delete (out, loc, true);
|
var_mem_delete (out, loc, true);
|
}
|
}
|
break;
|
break;
|
|
|
case MO_ADJUST:
|
case MO_ADJUST:
|
out->stack_adjust += mo->u.adjust;
|
out->stack_adjust += mo->u.adjust;
|
break;
|
break;
|
}
|
}
|
}
|
}
|
|
|
if (MAY_HAVE_DEBUG_INSNS)
|
if (MAY_HAVE_DEBUG_INSNS)
|
{
|
{
|
dataflow_set_equiv_regs (out);
|
dataflow_set_equiv_regs (out);
|
htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
|
htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
|
out);
|
out);
|
htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
|
htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
|
out);
|
out);
|
#if ENABLE_CHECKING
|
#if ENABLE_CHECKING
|
htab_traverse (shared_hash_htab (out->vars),
|
htab_traverse (shared_hash_htab (out->vars),
|
canonicalize_loc_order_check, out);
|
canonicalize_loc_order_check, out);
|
#endif
|
#endif
|
}
|
}
|
changed = dataflow_set_different (&old_out, out);
|
changed = dataflow_set_different (&old_out, out);
|
dataflow_set_destroy (&old_out);
|
dataflow_set_destroy (&old_out);
|
return changed;
|
return changed;
|
}
|
}
|
|
|
/* Find the locations of variables in the whole function. */
|
/* Find the locations of variables in the whole function. */
|
|
|
static bool
|
static bool
|
vt_find_locations (void)
|
vt_find_locations (void)
|
{
|
{
|
fibheap_t worklist, pending, fibheap_swap;
|
fibheap_t worklist, pending, fibheap_swap;
|
sbitmap visited, in_worklist, in_pending, sbitmap_swap;
|
sbitmap visited, in_worklist, in_pending, sbitmap_swap;
|
basic_block bb;
|
basic_block bb;
|
edge e;
|
edge e;
|
int *bb_order;
|
int *bb_order;
|
int *rc_order;
|
int *rc_order;
|
int i;
|
int i;
|
int htabsz = 0;
|
int htabsz = 0;
|
int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
|
int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
|
bool success = true;
|
bool success = true;
|
|
|
timevar_push (TV_VAR_TRACKING_DATAFLOW);
|
timevar_push (TV_VAR_TRACKING_DATAFLOW);
|
/* Compute reverse completion order of depth first search of the CFG
|
/* Compute reverse completion order of depth first search of the CFG
|
so that the data-flow runs faster. */
|
so that the data-flow runs faster. */
|
rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
|
rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
|
bb_order = XNEWVEC (int, last_basic_block);
|
bb_order = XNEWVEC (int, last_basic_block);
|
pre_and_rev_post_order_compute (NULL, rc_order, false);
|
pre_and_rev_post_order_compute (NULL, rc_order, false);
|
for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
|
for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
|
bb_order[rc_order[i]] = i;
|
bb_order[rc_order[i]] = i;
|
free (rc_order);
|
free (rc_order);
|
|
|
worklist = fibheap_new ();
|
worklist = fibheap_new ();
|
pending = fibheap_new ();
|
pending = fibheap_new ();
|
visited = sbitmap_alloc (last_basic_block);
|
visited = sbitmap_alloc (last_basic_block);
|
in_worklist = sbitmap_alloc (last_basic_block);
|
in_worklist = sbitmap_alloc (last_basic_block);
|
in_pending = sbitmap_alloc (last_basic_block);
|
in_pending = sbitmap_alloc (last_basic_block);
|
sbitmap_zero (in_worklist);
|
sbitmap_zero (in_worklist);
|
|
|
FOR_EACH_BB (bb)
|
FOR_EACH_BB (bb)
|
fibheap_insert (pending, bb_order[bb->index], bb);
|
fibheap_insert (pending, bb_order[bb->index], bb);
|
sbitmap_ones (in_pending);
|
sbitmap_ones (in_pending);
|
|
|
while (success && !fibheap_empty (pending))
|
while (success && !fibheap_empty (pending))
|
{
|
{
|
fibheap_swap = pending;
|
fibheap_swap = pending;
|
pending = worklist;
|
pending = worklist;
|
worklist = fibheap_swap;
|
worklist = fibheap_swap;
|
sbitmap_swap = in_pending;
|
sbitmap_swap = in_pending;
|
in_pending = in_worklist;
|
in_pending = in_worklist;
|
in_worklist = sbitmap_swap;
|
in_worklist = sbitmap_swap;
|
|
|
sbitmap_zero (visited);
|
sbitmap_zero (visited);
|
|
|
while (!fibheap_empty (worklist))
|
while (!fibheap_empty (worklist))
|
{
|
{
|
bb = (basic_block) fibheap_extract_min (worklist);
|
bb = (basic_block) fibheap_extract_min (worklist);
|
RESET_BIT (in_worklist, bb->index);
|
RESET_BIT (in_worklist, bb->index);
|
gcc_assert (!TEST_BIT (visited, bb->index));
|
gcc_assert (!TEST_BIT (visited, bb->index));
|
if (!TEST_BIT (visited, bb->index))
|
if (!TEST_BIT (visited, bb->index))
|
{
|
{
|
bool changed;
|
bool changed;
|
edge_iterator ei;
|
edge_iterator ei;
|
int oldinsz, oldoutsz;
|
int oldinsz, oldoutsz;
|
|
|
SET_BIT (visited, bb->index);
|
SET_BIT (visited, bb->index);
|
|
|
if (VTI (bb)->in.vars)
|
if (VTI (bb)->in.vars)
|
{
|
{
|
htabsz
|
htabsz
|
-= (htab_size (shared_hash_htab (VTI (bb)->in.vars))
|
-= (htab_size (shared_hash_htab (VTI (bb)->in.vars))
|
+ htab_size (shared_hash_htab (VTI (bb)->out.vars)));
|
+ htab_size (shared_hash_htab (VTI (bb)->out.vars)));
|
oldinsz
|
oldinsz
|
= htab_elements (shared_hash_htab (VTI (bb)->in.vars));
|
= htab_elements (shared_hash_htab (VTI (bb)->in.vars));
|
oldoutsz
|
oldoutsz
|
= htab_elements (shared_hash_htab (VTI (bb)->out.vars));
|
= htab_elements (shared_hash_htab (VTI (bb)->out.vars));
|
}
|
}
|
else
|
else
|
oldinsz = oldoutsz = 0;
|
oldinsz = oldoutsz = 0;
|
|
|
if (MAY_HAVE_DEBUG_INSNS)
|
if (MAY_HAVE_DEBUG_INSNS)
|
{
|
{
|
dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
|
dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
|
bool first = true, adjust = false;
|
bool first = true, adjust = false;
|
|
|
/* Calculate the IN set as the intersection of
|
/* Calculate the IN set as the intersection of
|
predecessor OUT sets. */
|
predecessor OUT sets. */
|
|
|
dataflow_set_clear (in);
|
dataflow_set_clear (in);
|
dst_can_be_shared = true;
|
dst_can_be_shared = true;
|
|
|
FOR_EACH_EDGE (e, ei, bb->preds)
|
FOR_EACH_EDGE (e, ei, bb->preds)
|
if (!VTI (e->src)->flooded)
|
if (!VTI (e->src)->flooded)
|
gcc_assert (bb_order[bb->index]
|
gcc_assert (bb_order[bb->index]
|
<= bb_order[e->src->index]);
|
<= bb_order[e->src->index]);
|
else if (first)
|
else if (first)
|
{
|
{
|
dataflow_set_copy (in, &VTI (e->src)->out);
|
dataflow_set_copy (in, &VTI (e->src)->out);
|
first_out = &VTI (e->src)->out;
|
first_out = &VTI (e->src)->out;
|
first = false;
|
first = false;
|
}
|
}
|
else
|
else
|
{
|
{
|
dataflow_set_merge (in, &VTI (e->src)->out);
|
dataflow_set_merge (in, &VTI (e->src)->out);
|
adjust = true;
|
adjust = true;
|
}
|
}
|
|
|
if (adjust)
|
if (adjust)
|
{
|
{
|
dataflow_post_merge_adjust (in, &VTI (bb)->permp);
|
dataflow_post_merge_adjust (in, &VTI (bb)->permp);
|
#if ENABLE_CHECKING
|
#if ENABLE_CHECKING
|
/* Merge and merge_adjust should keep entries in
|
/* Merge and merge_adjust should keep entries in
|
canonical order. */
|
canonical order. */
|
htab_traverse (shared_hash_htab (in->vars),
|
htab_traverse (shared_hash_htab (in->vars),
|
canonicalize_loc_order_check,
|
canonicalize_loc_order_check,
|
in);
|
in);
|
#endif
|
#endif
|
if (dst_can_be_shared)
|
if (dst_can_be_shared)
|
{
|
{
|
shared_hash_destroy (in->vars);
|
shared_hash_destroy (in->vars);
|
in->vars = shared_hash_copy (first_out->vars);
|
in->vars = shared_hash_copy (first_out->vars);
|
}
|
}
|
}
|
}
|
|
|
VTI (bb)->flooded = true;
|
VTI (bb)->flooded = true;
|
}
|
}
|
else
|
else
|
{
|
{
|
/* Calculate the IN set as union of predecessor OUT sets. */
|
/* Calculate the IN set as union of predecessor OUT sets. */
|
dataflow_set_clear (&VTI (bb)->in);
|
dataflow_set_clear (&VTI (bb)->in);
|
FOR_EACH_EDGE (e, ei, bb->preds)
|
FOR_EACH_EDGE (e, ei, bb->preds)
|
dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
|
dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
|
}
|
}
|
|
|
changed = compute_bb_dataflow (bb);
|
changed = compute_bb_dataflow (bb);
|
htabsz += (htab_size (shared_hash_htab (VTI (bb)->in.vars))
|
htabsz += (htab_size (shared_hash_htab (VTI (bb)->in.vars))
|
+ htab_size (shared_hash_htab (VTI (bb)->out.vars)));
|
+ htab_size (shared_hash_htab (VTI (bb)->out.vars)));
|
|
|
if (htabmax && htabsz > htabmax)
|
if (htabmax && htabsz > htabmax)
|
{
|
{
|
if (MAY_HAVE_DEBUG_INSNS)
|
if (MAY_HAVE_DEBUG_INSNS)
|
inform (DECL_SOURCE_LOCATION (cfun->decl),
|
inform (DECL_SOURCE_LOCATION (cfun->decl),
|
"variable tracking size limit exceeded with "
|
"variable tracking size limit exceeded with "
|
"-fvar-tracking-assignments, retrying without");
|
"-fvar-tracking-assignments, retrying without");
|
else
|
else
|
inform (DECL_SOURCE_LOCATION (cfun->decl),
|
inform (DECL_SOURCE_LOCATION (cfun->decl),
|
"variable tracking size limit exceeded");
|
"variable tracking size limit exceeded");
|
success = false;
|
success = false;
|
break;
|
break;
|
}
|
}
|
|
|
if (changed)
|
if (changed)
|
{
|
{
|
FOR_EACH_EDGE (e, ei, bb->succs)
|
FOR_EACH_EDGE (e, ei, bb->succs)
|
{
|
{
|
if (e->dest == EXIT_BLOCK_PTR)
|
if (e->dest == EXIT_BLOCK_PTR)
|
continue;
|
continue;
|
|
|
if (TEST_BIT (visited, e->dest->index))
|
if (TEST_BIT (visited, e->dest->index))
|
{
|
{
|
if (!TEST_BIT (in_pending, e->dest->index))
|
if (!TEST_BIT (in_pending, e->dest->index))
|
{
|
{
|
/* Send E->DEST to next round. */
|
/* Send E->DEST to next round. */
|
SET_BIT (in_pending, e->dest->index);
|
SET_BIT (in_pending, e->dest->index);
|
fibheap_insert (pending,
|
fibheap_insert (pending,
|
bb_order[e->dest->index],
|
bb_order[e->dest->index],
|
e->dest);
|
e->dest);
|
}
|
}
|
}
|
}
|
else if (!TEST_BIT (in_worklist, e->dest->index))
|
else if (!TEST_BIT (in_worklist, e->dest->index))
|
{
|
{
|
/* Add E->DEST to current round. */
|
/* Add E->DEST to current round. */
|
SET_BIT (in_worklist, e->dest->index);
|
SET_BIT (in_worklist, e->dest->index);
|
fibheap_insert (worklist, bb_order[e->dest->index],
|
fibheap_insert (worklist, bb_order[e->dest->index],
|
e->dest);
|
e->dest);
|
}
|
}
|
}
|
}
|
}
|
}
|
|
|
if (dump_file)
|
if (dump_file)
|
fprintf (dump_file,
|
fprintf (dump_file,
|
"BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
|
"BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
|
bb->index,
|
bb->index,
|
(int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
|
(int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
|
oldinsz,
|
oldinsz,
|
(int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
|
(int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
|
oldoutsz,
|
oldoutsz,
|
(int)worklist->nodes, (int)pending->nodes, htabsz);
|
(int)worklist->nodes, (int)pending->nodes, htabsz);
|
|
|
if (dump_file && (dump_flags & TDF_DETAILS))
|
if (dump_file && (dump_flags & TDF_DETAILS))
|
{
|
{
|
fprintf (dump_file, "BB %i IN:\n", bb->index);
|
fprintf (dump_file, "BB %i IN:\n", bb->index);
|
dump_dataflow_set (&VTI (bb)->in);
|
dump_dataflow_set (&VTI (bb)->in);
|
fprintf (dump_file, "BB %i OUT:\n", bb->index);
|
fprintf (dump_file, "BB %i OUT:\n", bb->index);
|
dump_dataflow_set (&VTI (bb)->out);
|
dump_dataflow_set (&VTI (bb)->out);
|
}
|
}
|
}
|
}
|
}
|
}
|
}
|
}
|
|
|
if (success && MAY_HAVE_DEBUG_INSNS)
|
if (success && MAY_HAVE_DEBUG_INSNS)
|
FOR_EACH_BB (bb)
|
FOR_EACH_BB (bb)
|
gcc_assert (VTI (bb)->flooded);
|
gcc_assert (VTI (bb)->flooded);
|
|
|
free (bb_order);
|
free (bb_order);
|
fibheap_delete (worklist);
|
fibheap_delete (worklist);
|
fibheap_delete (pending);
|
fibheap_delete (pending);
|
sbitmap_free (visited);
|
sbitmap_free (visited);
|
sbitmap_free (in_worklist);
|
sbitmap_free (in_worklist);
|
sbitmap_free (in_pending);
|
sbitmap_free (in_pending);
|
|
|
timevar_pop (TV_VAR_TRACKING_DATAFLOW);
|
timevar_pop (TV_VAR_TRACKING_DATAFLOW);
|
return success;
|
return success;
|
}
|
}
|
|
|
/* Print the content of the LIST to dump file. */
|
/* Print the content of the LIST to dump file. */
|
|
|
static void
|
static void
|
dump_attrs_list (attrs list)
|
dump_attrs_list (attrs list)
|
{
|
{
|
for (; list; list = list->next)
|
for (; list; list = list->next)
|
{
|
{
|
if (dv_is_decl_p (list->dv))
|
if (dv_is_decl_p (list->dv))
|
print_mem_expr (dump_file, dv_as_decl (list->dv));
|
print_mem_expr (dump_file, dv_as_decl (list->dv));
|
else
|
else
|
print_rtl_single (dump_file, dv_as_value (list->dv));
|
print_rtl_single (dump_file, dv_as_value (list->dv));
|
fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
|
fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
|
}
|
}
|
fprintf (dump_file, "\n");
|
fprintf (dump_file, "\n");
|
}
|
}
|
|
|
/* Print the information about variable *SLOT to dump file. */
|
/* Print the information about variable *SLOT to dump file. */
|
|
|
static int
|
static int
|
dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
|
dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
|
{
|
{
|
variable var = (variable) *slot;
|
variable var = (variable) *slot;
|
|
|
dump_var (var);
|
dump_var (var);
|
|
|
/* Continue traversing the hash table. */
|
/* Continue traversing the hash table. */
|
return 1;
|
return 1;
|
}
|
}
|
|
|
/* Print the information about variable VAR to dump file. */
|
/* Print the information about variable VAR to dump file. */
|
|
|
static void
|
static void
|
dump_var (variable var)
|
dump_var (variable var)
|
{
|
{
|
int i;
|
int i;
|
location_chain node;
|
location_chain node;
|
|
|
if (dv_is_decl_p (var->dv))
|
if (dv_is_decl_p (var->dv))
|
{
|
{
|
const_tree decl = dv_as_decl (var->dv);
|
const_tree decl = dv_as_decl (var->dv);
|
|
|
if (DECL_NAME (decl))
|
if (DECL_NAME (decl))
|
{
|
{
|
fprintf (dump_file, " name: %s",
|
fprintf (dump_file, " name: %s",
|
IDENTIFIER_POINTER (DECL_NAME (decl)));
|
IDENTIFIER_POINTER (DECL_NAME (decl)));
|
if (dump_flags & TDF_UID)
|
if (dump_flags & TDF_UID)
|
fprintf (dump_file, "D.%u", DECL_UID (decl));
|
fprintf (dump_file, "D.%u", DECL_UID (decl));
|
}
|
}
|
else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
|
else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
|
fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
|
fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
|
else
|
else
|
fprintf (dump_file, " name: D.%u", DECL_UID (decl));
|
fprintf (dump_file, " name: D.%u", DECL_UID (decl));
|
fprintf (dump_file, "\n");
|
fprintf (dump_file, "\n");
|
}
|
}
|
else
|
else
|
{
|
{
|
fputc (' ', dump_file);
|
fputc (' ', dump_file);
|
print_rtl_single (dump_file, dv_as_value (var->dv));
|
print_rtl_single (dump_file, dv_as_value (var->dv));
|
}
|
}
|
|
|
for (i = 0; i < var->n_var_parts; i++)
|
for (i = 0; i < var->n_var_parts; i++)
|
{
|
{
|
fprintf (dump_file, " offset %ld\n",
|
fprintf (dump_file, " offset %ld\n",
|
(long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
|
(long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
|
for (node = var->var_part[i].loc_chain; node; node = node->next)
|
for (node = var->var_part[i].loc_chain; node; node = node->next)
|
{
|
{
|
fprintf (dump_file, " ");
|
fprintf (dump_file, " ");
|
if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
|
if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
|
fprintf (dump_file, "[uninit]");
|
fprintf (dump_file, "[uninit]");
|
print_rtl_single (dump_file, node->loc);
|
print_rtl_single (dump_file, node->loc);
|
}
|
}
|
}
|
}
|
}
|
}
|
|
|
/* Print the information about variables from hash table VARS to dump file. */
|
/* Print the information about variables from hash table VARS to dump file. */
|
|
|
static void
|
static void
|
dump_vars (htab_t vars)
|
dump_vars (htab_t vars)
|
{
|
{
|
if (htab_elements (vars) > 0)
|
if (htab_elements (vars) > 0)
|
{
|
{
|
fprintf (dump_file, "Variables:\n");
|
fprintf (dump_file, "Variables:\n");
|
htab_traverse (vars, dump_var_slot, NULL);
|
htab_traverse (vars, dump_var_slot, NULL);
|
}
|
}
|
}
|
}
|
|
|
/* Print the dataflow set SET to dump file. */
|
/* Print the dataflow set SET to dump file. */
|
|
|
static void
|
static void
|
dump_dataflow_set (dataflow_set *set)
|
dump_dataflow_set (dataflow_set *set)
|
{
|
{
|
int i;
|
int i;
|
|
|
fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
|
fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
|
set->stack_adjust);
|
set->stack_adjust);
|
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
|
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
|
{
|
{
|
if (set->regs[i])
|
if (set->regs[i])
|
{
|
{
|
fprintf (dump_file, "Reg %d:", i);
|
fprintf (dump_file, "Reg %d:", i);
|
dump_attrs_list (set->regs[i]);
|
dump_attrs_list (set->regs[i]);
|
}
|
}
|
}
|
}
|
dump_vars (shared_hash_htab (set->vars));
|
dump_vars (shared_hash_htab (set->vars));
|
fprintf (dump_file, "\n");
|
fprintf (dump_file, "\n");
|
}
|
}
|
|
|
/* Print the IN and OUT sets for each basic block to dump file. */
|
/* Print the IN and OUT sets for each basic block to dump file. */
|
|
|
static void
|
static void
|
dump_dataflow_sets (void)
|
dump_dataflow_sets (void)
|
{
|
{
|
basic_block bb;
|
basic_block bb;
|
|
|
FOR_EACH_BB (bb)
|
FOR_EACH_BB (bb)
|
{
|
{
|
fprintf (dump_file, "\nBasic block %d:\n", bb->index);
|
fprintf (dump_file, "\nBasic block %d:\n", bb->index);
|
fprintf (dump_file, "IN:\n");
|
fprintf (dump_file, "IN:\n");
|
dump_dataflow_set (&VTI (bb)->in);
|
dump_dataflow_set (&VTI (bb)->in);
|
fprintf (dump_file, "OUT:\n");
|
fprintf (dump_file, "OUT:\n");
|
dump_dataflow_set (&VTI (bb)->out);
|
dump_dataflow_set (&VTI (bb)->out);
|
}
|
}
|
}
|
}
|
|
|
/* Return the variable for DV in dropped_values, inserting one if
|
/* Return the variable for DV in dropped_values, inserting one if
|
requested with INSERT. */
|
requested with INSERT. */
|
|
|
static inline variable
|
static inline variable
|
variable_from_dropped (decl_or_value dv, enum insert_option insert)
|
variable_from_dropped (decl_or_value dv, enum insert_option insert)
|
{
|
{
|
void **slot;
|
void **slot;
|
variable empty_var;
|
variable empty_var;
|
onepart_enum_t onepart;
|
onepart_enum_t onepart;
|
|
|
slot = htab_find_slot_with_hash (dropped_values, dv, dv_htab_hash (dv),
|
slot = htab_find_slot_with_hash (dropped_values, dv, dv_htab_hash (dv),
|
insert);
|
insert);
|
|
|
if (!slot)
|
if (!slot)
|
return NULL;
|
return NULL;
|
|
|
if (*slot)
|
if (*slot)
|
return (variable) *slot;
|
return (variable) *slot;
|
|
|
gcc_checking_assert (insert == INSERT);
|
gcc_checking_assert (insert == INSERT);
|
|
|
onepart = dv_onepart_p (dv);
|
onepart = dv_onepart_p (dv);
|
|
|
gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
|
gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
|
|
|
empty_var = (variable) pool_alloc (onepart_pool (onepart));
|
empty_var = (variable) pool_alloc (onepart_pool (onepart));
|
empty_var->dv = dv;
|
empty_var->dv = dv;
|
empty_var->refcount = 1;
|
empty_var->refcount = 1;
|
empty_var->n_var_parts = 0;
|
empty_var->n_var_parts = 0;
|
empty_var->onepart = onepart;
|
empty_var->onepart = onepart;
|
empty_var->in_changed_variables = false;
|
empty_var->in_changed_variables = false;
|
empty_var->var_part[0].loc_chain = NULL;
|
empty_var->var_part[0].loc_chain = NULL;
|
empty_var->var_part[0].cur_loc = NULL;
|
empty_var->var_part[0].cur_loc = NULL;
|
VAR_LOC_1PAUX (empty_var) = NULL;
|
VAR_LOC_1PAUX (empty_var) = NULL;
|
set_dv_changed (dv, true);
|
set_dv_changed (dv, true);
|
|
|
*slot = empty_var;
|
*slot = empty_var;
|
|
|
return empty_var;
|
return empty_var;
|
}
|
}
|
|
|
/* Recover the one-part aux from dropped_values. */
|
/* Recover the one-part aux from dropped_values. */
|
|
|
static struct onepart_aux *
|
static struct onepart_aux *
|
recover_dropped_1paux (variable var)
|
recover_dropped_1paux (variable var)
|
{
|
{
|
variable dvar;
|
variable dvar;
|
|
|
gcc_checking_assert (var->onepart);
|
gcc_checking_assert (var->onepart);
|
|
|
if (VAR_LOC_1PAUX (var))
|
if (VAR_LOC_1PAUX (var))
|
return VAR_LOC_1PAUX (var);
|
return VAR_LOC_1PAUX (var);
|
|
|
if (var->onepart == ONEPART_VDECL)
|
if (var->onepart == ONEPART_VDECL)
|
return NULL;
|
return NULL;
|
|
|
dvar = variable_from_dropped (var->dv, NO_INSERT);
|
dvar = variable_from_dropped (var->dv, NO_INSERT);
|
|
|
if (!dvar)
|
if (!dvar)
|
return NULL;
|
return NULL;
|
|
|
VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
|
VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
|
VAR_LOC_1PAUX (dvar) = NULL;
|
VAR_LOC_1PAUX (dvar) = NULL;
|
|
|
return VAR_LOC_1PAUX (var);
|
return VAR_LOC_1PAUX (var);
|
}
|
}
|
|
|
/* Add variable VAR to the hash table of changed variables and
|
/* Add variable VAR to the hash table of changed variables and
|
if it has no locations delete it from SET's hash table. */
|
if it has no locations delete it from SET's hash table. */
|
|
|
static void
variable_was_changed (variable var, dataflow_set *set)
{
  hashval_t hash = dv_htab_hash (var->dv);

  if (emit_notes)
    {
      void **slot;

      /* Remember this decl or VALUE has been added to changed_variables.  */
      set_dv_changed (var->dv, true);

      slot = htab_find_slot_with_hash (changed_variables,
                                       var->dv,
                                       hash, INSERT);

      if (*slot)
        {
          /* An entry for this dv was already queued; retire it, taking
             over its one-part auxiliary data so it is not lost.  */
          variable old_var = (variable) *slot;
          gcc_assert (old_var->in_changed_variables);
          old_var->in_changed_variables = false;
          if (var != old_var && var->onepart)
            {
              /* Restore the auxiliary info from an empty variable
                 previously created for changed_variables, so it is
                 not lost.  */
              gcc_checking_assert (!VAR_LOC_1PAUX (var));
              VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
              VAR_LOC_1PAUX (old_var) = NULL;
            }
          variable_htab_free (*slot);
        }

      if (set && var->n_var_parts == 0)
        {
          /* VAR has no locations left: queue an empty variable so the
             note emitter sees the loss, and for VALUEs/DEBUG_EXPRs also
             record (or reuse) an entry in dropped_values.  */
          onepart_enum_t onepart = var->onepart;
          variable empty_var = NULL;
          void **dslot = NULL;

          if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
            {
              dslot = htab_find_slot_with_hash (dropped_values, var->dv,
                                                dv_htab_hash (var->dv),
                                                INSERT);
              empty_var = (variable) *dslot;

              if (empty_var)
                {
                  gcc_checking_assert (!empty_var->in_changed_variables);
                  /* Move the aux data to VAR unless VAR already has some.  */
                  if (!VAR_LOC_1PAUX (var))
                    {
                      VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
                      VAR_LOC_1PAUX (empty_var) = NULL;
                    }
                  else
                    gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
                }
            }

          if (!empty_var)
            {
              empty_var = (variable) pool_alloc (onepart_pool (onepart));
              empty_var->dv = var->dv;
              empty_var->refcount = 1;
              empty_var->n_var_parts = 0;
              empty_var->onepart = onepart;
              if (dslot)
                {
                  /* One reference for dropped_values.  */
                  empty_var->refcount++;
                  *dslot = empty_var;
                }
            }
          else
            empty_var->refcount++;
          empty_var->in_changed_variables = true;
          *slot = empty_var;
          if (onepart)
            {
              empty_var->var_part[0].loc_chain = NULL;
              empty_var->var_part[0].cur_loc = NULL;
              /* Hand VAR's aux data over to the queued empty variable.  */
              VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
              VAR_LOC_1PAUX (var) = NULL;
            }
          goto drop_var;
        }
      else
        {
          if (var->onepart && !VAR_LOC_1PAUX (var))
            recover_dropped_1paux (var);
          var->refcount++;
          var->in_changed_variables = true;
          *slot = var;
        }
    }
  else
    {
      /* Not emitting notes: only SET's hash table needs updating.  */
      gcc_assert (set);
      if (var->n_var_parts == 0)
        {
          void **slot;

        drop_var:
          slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
          if (slot)
            {
              if (shared_hash_shared (set->vars))
                slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
                                                      NO_INSERT);
              htab_clear_slot (shared_hash_htab (set->vars), slot);
            }
        }
    }
}

|
/* Look for the index in VAR->var_part corresponding to OFFSET.
|
/* Look for the index in VAR->var_part corresponding to OFFSET.
|
Return -1 if not found. If INSERTION_POINT is non-NULL, the
|
Return -1 if not found. If INSERTION_POINT is non-NULL, the
|
referenced int will be set to the index that the part has or should
|
referenced int will be set to the index that the part has or should
|
have, if it should be inserted. */
|
have, if it should be inserted. */
|
|
|
static inline int
|
static inline int
|
find_variable_location_part (variable var, HOST_WIDE_INT offset,
|
find_variable_location_part (variable var, HOST_WIDE_INT offset,
|
int *insertion_point)
|
int *insertion_point)
|
{
|
{
|
int pos, low, high;
|
int pos, low, high;
|
|
|
if (var->onepart)
|
if (var->onepart)
|
{
|
{
|
if (offset != 0)
|
if (offset != 0)
|
return -1;
|
return -1;
|
|
|
if (insertion_point)
|
if (insertion_point)
|
*insertion_point = 0;
|
*insertion_point = 0;
|
|
|
return var->n_var_parts - 1;
|
return var->n_var_parts - 1;
|
}
|
}
|
|
|
/* Find the location part. */
|
/* Find the location part. */
|
low = 0;
|
low = 0;
|
high = var->n_var_parts;
|
high = var->n_var_parts;
|
while (low != high)
|
while (low != high)
|
{
|
{
|
pos = (low + high) / 2;
|
pos = (low + high) / 2;
|
if (VAR_PART_OFFSET (var, pos) < offset)
|
if (VAR_PART_OFFSET (var, pos) < offset)
|
low = pos + 1;
|
low = pos + 1;
|
else
|
else
|
high = pos;
|
high = pos;
|
}
|
}
|
pos = low;
|
pos = low;
|
|
|
if (insertion_point)
|
if (insertion_point)
|
*insertion_point = pos;
|
*insertion_point = pos;
|
|
|
if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
|
if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
|
return pos;
|
return pos;
|
|
|
return -1;
|
return -1;
|
}
|
}
|
|
|
/* Set the part of variable's location in the dataflow set SET.  The
   variable part is specified by the variable's slot SLOT, declaration
   or value DV and offset OFFSET; the part's new location is LOC, with
   initialization status INITIALIZED and source expression SET_SRC.
   Returns the (possibly moved after unsharing) slot.  */

static void **
set_slot_part (dataflow_set *set, rtx loc, void **slot,
               decl_or_value dv, HOST_WIDE_INT offset,
               enum var_init_status initialized, rtx set_src)
{
  int pos;
  location_chain node, next;
  location_chain *nextp;
  variable var;
  onepart_enum_t onepart;

  var = (variable) *slot;

  if (var)
    onepart = var->onepart;
  else
    onepart = dv_onepart_p (dv);

  gcc_checking_assert (offset == 0 || !onepart);
  gcc_checking_assert (loc != dv_as_opaque (dv));

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  if (!var)
    {
      /* Create new variable information.  */
      var = (variable) pool_alloc (onepart_pool (onepart));
      var->dv = dv;
      var->refcount = 1;
      var->n_var_parts = 1;
      var->onepart = onepart;
      var->in_changed_variables = false;
      if (var->onepart)
        VAR_LOC_1PAUX (var) = NULL;
      else
        VAR_PART_OFFSET (var, 0) = offset;
      var->var_part[0].loc_chain = NULL;
      var->var_part[0].cur_loc = NULL;
      *slot = var;
      pos = 0;
      nextp = &var->var_part[0].loc_chain;
    }
  else if (onepart)
    {
      /* Find the insertion point in the canonically-ordered location
         chain.  R becomes 0 if LOC is already present, 1 if we stopped
         before a greater element, -1 if we ran off the end; C counts
         the nodes preceding the insertion point.  */
      int r = -1, c = 0;

      gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));

      pos = 0;

      if (GET_CODE (loc) == VALUE)
        {
          for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
               nextp = &node->next)
            if (GET_CODE (node->loc) == VALUE)
              {
                if (node->loc == loc)
                  {
                    r = 0;
                    break;
                  }
                if (canon_value_cmp (node->loc, loc))
                  c++;
                else
                  {
                    r = 1;
                    break;
                  }
              }
            else if (REG_P (node->loc) || MEM_P (node->loc))
              c++;
            else
              {
                r = 1;
                break;
              }
        }
      else if (REG_P (loc))
        {
          /* Registers sort by register number.  */
          for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
               nextp = &node->next)
            if (REG_P (node->loc))
              {
                if (REGNO (node->loc) < REGNO (loc))
                  c++;
                else
                  {
                    if (REGNO (node->loc) == REGNO (loc))
                      r = 0;
                    else
                      r = 1;
                    break;
                  }
              }
            else
              {
                r = 1;
                break;
              }
        }
      else if (MEM_P (loc))
        {
          /* MEMs follow registers, ordered by their addresses.  */
          for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
               nextp = &node->next)
            if (REG_P (node->loc))
              c++;
            else if (MEM_P (node->loc))
              {
                if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
                  break;
                else
                  c++;
              }
            else
              {
                r = 1;
                break;
              }
        }
      else
        for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
             nextp = &node->next)
          if ((r = loc_cmp (node->loc, loc)) >= 0)
            break;
          else
            c++;

      if (r == 0)
        return slot;

      if (shared_var_p (var, set->vars))
        {
          slot = unshare_variable (set, slot, var, initialized);
          var = (variable)*slot;
          /* Re-derive NEXTP in the unshared copy by skipping the same
             C nodes counted above.  */
          for (nextp = &var->var_part[0].loc_chain; c;
               nextp = &(*nextp)->next)
            c--;
          gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
        }
    }
  else
    {
      int inspos = 0;

      gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));

      pos = find_variable_location_part (var, offset, &inspos);

      if (pos >= 0)
        {
          node = var->var_part[pos].loc_chain;

          if (node
              && ((REG_P (node->loc) && REG_P (loc)
                   && REGNO (node->loc) == REGNO (loc))
                  || rtx_equal_p (node->loc, loc)))
            {
              /* LOC is in the beginning of the chain so we have nothing
                 to do.  */
              if (node->init < initialized)
                node->init = initialized;
              if (set_src != NULL)
                node->set_src = set_src;

              return slot;
            }
          else
            {
              /* We have to make a copy of a shared variable.  */
              if (shared_var_p (var, set->vars))
                {
                  slot = unshare_variable (set, slot, var, initialized);
                  var = (variable)*slot;
                }
            }
        }
      else
        {
          /* We have not found the location part, new one will be created.  */

          /* We have to make a copy of the shared variable.  */
          if (shared_var_p (var, set->vars))
            {
              slot = unshare_variable (set, slot, var, initialized);
              var = (variable)*slot;
            }

          /* We track only variables whose size is <= MAX_VAR_PARTS bytes
             thus there are at most MAX_VAR_PARTS different offsets.  */
          gcc_assert (var->n_var_parts < MAX_VAR_PARTS
                      && (!var->n_var_parts || !onepart));

          /* We have to move the elements of array starting at index
             inspos to the next position.  */
          for (pos = var->n_var_parts; pos > inspos; pos--)
            var->var_part[pos] = var->var_part[pos - 1];

          var->n_var_parts++;
          gcc_checking_assert (!onepart);
          VAR_PART_OFFSET (var, pos) = offset;
          var->var_part[pos].loc_chain = NULL;
          var->var_part[pos].cur_loc = NULL;
        }

      /* Delete the location from the list.  */
      nextp = &var->var_part[pos].loc_chain;
      for (node = var->var_part[pos].loc_chain; node; node = next)
        {
          next = node->next;
          if ((REG_P (node->loc) && REG_P (loc)
               && REGNO (node->loc) == REGNO (loc))
              || rtx_equal_p (node->loc, loc))
            {
              /* Save these values, to assign to the new node, before
                 deleting this one.  */
              if (node->init > initialized)
                initialized = node->init;
              if (node->set_src != NULL && set_src == NULL)
                set_src = node->set_src;
              if (var->var_part[pos].cur_loc == node->loc)
                var->var_part[pos].cur_loc = NULL;
              pool_free (loc_chain_pool, node);
              *nextp = next;
              break;
            }
          else
            nextp = &node->next;
        }

      nextp = &var->var_part[pos].loc_chain;
    }

  /* Add the location to the beginning.  */
  node = (location_chain) pool_alloc (loc_chain_pool);
  node->loc = loc;
  node->init = initialized;
  node->set_src = set_src;
  node->next = *nextp;
  *nextp = node;

  /* If no location was emitted do so.  */
  if (var->var_part[pos].cur_loc == NULL)
    variable_was_changed (var, set);

  return slot;
}

|
/* Set the part of variable's location in the dataflow set SET. The
|
/* Set the part of variable's location in the dataflow set SET. The
|
variable part is specified by variable's declaration in DV and
|
variable part is specified by variable's declaration in DV and
|
offset OFFSET and the part's location by LOC. IOPT should be
|
offset OFFSET and the part's location by LOC. IOPT should be
|
NO_INSERT if the variable is known to be in SET already and the
|
NO_INSERT if the variable is known to be in SET already and the
|
variable hash table must not be resized, and INSERT otherwise. */
|
variable hash table must not be resized, and INSERT otherwise. */
|
|
|
static void
|
static void
|
set_variable_part (dataflow_set *set, rtx loc,
|
set_variable_part (dataflow_set *set, rtx loc,
|
decl_or_value dv, HOST_WIDE_INT offset,
|
decl_or_value dv, HOST_WIDE_INT offset,
|
enum var_init_status initialized, rtx set_src,
|
enum var_init_status initialized, rtx set_src,
|
enum insert_option iopt)
|
enum insert_option iopt)
|
{
|
{
|
void **slot;
|
void **slot;
|
|
|
if (iopt == NO_INSERT)
|
if (iopt == NO_INSERT)
|
slot = shared_hash_find_slot_noinsert (set->vars, dv);
|
slot = shared_hash_find_slot_noinsert (set->vars, dv);
|
else
|
else
|
{
|
{
|
slot = shared_hash_find_slot (set->vars, dv);
|
slot = shared_hash_find_slot (set->vars, dv);
|
if (!slot)
|
if (!slot)
|
slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
|
slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
|
}
|
}
|
set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
|
set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
|
}
|
}
|
|
|
/* Remove all recorded register locations for the given variable part
   from dataflow set SET, except for those that are identical to loc.
   The variable part is specified by variable's declaration or value
   DV and offset OFFSET.  */

static void **
clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
                   HOST_WIDE_INT offset, rtx set_src)
{
  variable var = (variable) *slot;
  int pos = find_variable_location_part (var, offset, NULL);

  if (pos >= 0)
    {
      location_chain node, next;

      /* Remove the register locations from the dataflow set.  */
      next = var->var_part[pos].loc_chain;
      for (node = next; node; node = next)
        {
          /* NEXT is saved up front because delete_slot_part below may
             free NODE.  */
          next = node->next;
          if (node->loc != loc
              && (!flag_var_tracking_uninit
                  || !set_src
                  || MEM_P (set_src)
                  || !rtx_equal_p (set_src, node->set_src)))
            {
              if (REG_P (node->loc))
                {
                  attrs anode, anext;
                  attrs *anextp;

                  /* Remove the variable part from the register's
                     list, but preserve any other variable parts
                     that might be regarded as live in that same
                     register.  */
                  anextp = &set->regs[REGNO (node->loc)];
                  for (anode = *anextp; anode; anode = anext)
                    {
                      anext = anode->next;
                      if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
                          && anode->offset == offset)
                        {
                          pool_free (attrs_pool, anode);
                          *anextp = anext;
                        }
                      else
                        anextp = &anode->next;
                    }
                }

              slot = delete_slot_part (set, node->loc, slot, offset);
            }
        }
    }

  return slot;
}

|
/* Remove all recorded register locations for the given variable part
|
/* Remove all recorded register locations for the given variable part
|
from dataflow set SET, except for those that are identical to loc.
|
from dataflow set SET, except for those that are identical to loc.
|
The variable part is specified by variable's declaration or value
|
The variable part is specified by variable's declaration or value
|
DV and offset OFFSET. */
|
DV and offset OFFSET. */
|
|
|
static void
|
static void
|
clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
|
clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
|
HOST_WIDE_INT offset, rtx set_src)
|
HOST_WIDE_INT offset, rtx set_src)
|
{
|
{
|
void **slot;
|
void **slot;
|
|
|
if (!dv_as_opaque (dv)
|
if (!dv_as_opaque (dv)
|
|| (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
|
|| (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
|
return;
|
return;
|
|
|
slot = shared_hash_find_slot_noinsert (set->vars, dv);
|
slot = shared_hash_find_slot_noinsert (set->vars, dv);
|
if (!slot)
|
if (!slot)
|
return;
|
return;
|
|
|
clobber_slot_part (set, loc, slot, offset, set_src);
|
clobber_slot_part (set, loc, slot, offset, set_src);
|
}
|
}
|
|
|
/* Delete the part of variable's location from dataflow set SET.  The
   variable part is specified by its SET->vars slot SLOT and offset
   OFFSET and the part's location by LOC.  Returns the (possibly moved
   after unsharing) slot.  */

static void **
delete_slot_part (dataflow_set *set, rtx loc, void **slot,
                  HOST_WIDE_INT offset)
{
  variable var = (variable) *slot;
  int pos = find_variable_location_part (var, offset, NULL);

  if (pos >= 0)
    {
      location_chain node, next;
      location_chain *nextp;
      bool changed;
      rtx cur_loc;

      if (shared_var_p (var, set->vars))
        {
          /* If the variable contains the location part we have to
             make a copy of the variable.  */
          for (node = var->var_part[pos].loc_chain; node;
               node = node->next)
            {
              if ((REG_P (node->loc) && REG_P (loc)
                   && REGNO (node->loc) == REGNO (loc))
                  || rtx_equal_p (node->loc, loc))
                {
                  slot = unshare_variable (set, slot, var,
                                           VAR_INIT_STATUS_UNKNOWN);
                  var = (variable)*slot;
                  break;
                }
            }
        }

      /* For one-part variables with aux data the last-emitted location
         lives in VAR_LOC_FROM rather than cur_loc.  */
      if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
        cur_loc = VAR_LOC_FROM (var);
      else
        cur_loc = var->var_part[pos].cur_loc;

      /* Delete the location part.  */
      changed = false;
      nextp = &var->var_part[pos].loc_chain;
      for (node = *nextp; node; node = next)
        {
          next = node->next;
          if ((REG_P (node->loc) && REG_P (loc)
               && REGNO (node->loc) == REGNO (loc))
              || rtx_equal_p (node->loc, loc))
            {
              /* If we have deleted the location which was last emitted
                 we have to emit new location so add the variable to set
                 of changed variables.  */
              if (cur_loc == node->loc)
                {
                  changed = true;
                  var->var_part[pos].cur_loc = NULL;
                  if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
                    VAR_LOC_FROM (var) = NULL;
                }
              pool_free (loc_chain_pool, node);
              *nextp = next;
              break;
            }
          else
            nextp = &node->next;
        }

      if (var->var_part[pos].loc_chain == NULL)
        {
          /* The part became empty: drop it and shift the rest down.  */
          changed = true;
          var->n_var_parts--;
          while (pos < var->n_var_parts)
            {
              var->var_part[pos] = var->var_part[pos + 1];
              pos++;
            }
        }
      if (changed)
        variable_was_changed (var, set);
    }

  return slot;
}

|
/* Delete the part of variable's location from dataflow set SET. The
|
/* Delete the part of variable's location from dataflow set SET. The
|
variable part is specified by variable's declaration or value DV
|
variable part is specified by variable's declaration or value DV
|
and offset OFFSET and the part's location by LOC. */
|
and offset OFFSET and the part's location by LOC. */
|
|
|
static void
|
static void
|
delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
|
delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
|
HOST_WIDE_INT offset)
|
HOST_WIDE_INT offset)
|
{
|
{
|
void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
|
void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
|
if (!slot)
|
if (!slot)
|
return;
|
return;
|
|
|
delete_slot_part (set, loc, slot, offset);
|
delete_slot_part (set, loc, slot, offset);
|
}
|
}
|
|
|
/* Vector types used by the location-expansion code below: a heap
   vector of variables and a stack vector of RTXen.  */

DEF_VEC_P (variable);
DEF_VEC_ALLOC_P (variable, heap);

DEF_VEC_ALLOC_P_STACK (rtx);
#define VEC_rtx_stack_alloc(alloc) VEC_stack_alloc (rtx, alloc)

|
/* Structure for passing some other parameters to function
   vt_expand_loc_callback.  */
struct expand_loc_callback_data
{
  /* The variables and values active at this point.  */
  htab_t vars;

  /* Stack of values and debug_exprs under expansion, and their
     children.  */
  VEC (rtx, stack) *expanding;

  /* Stack of values and debug_exprs whose expansion hit recursion
     cycles.  They will have VALUE_RECURSED_INTO marked when added to
     this list.  This flag will be cleared if any of its dependencies
     resolves to a valid location.  So, if the flag remains set at the
     end of the search, we know no valid location for this one can
     possibly exist.  */
  VEC (rtx, stack) *pending;

  /* The maximum depth among the sub-expressions under expansion.
     Zero indicates no expansion so far.  */
  int depth;
};

|
/* Allocate the one-part auxiliary data structure for VAR, with enough
   room for COUNT dependencies.  */

static void
loc_exp_dep_alloc (variable var, int count)
{
  size_t allocsize;

  gcc_checking_assert (var->onepart);

  /* We can be called with COUNT == 0 to allocate the data structure
     without any dependencies, e.g. for the backlinks only.  However,
     if we are specifying a COUNT, then the dependency list must have
     been emptied before.  It would be possible to adjust pointers or
     force it empty here, but this is better done at an earlier point
     in the algorithm, so we instead leave an assertion to catch
     errors.  */
  gcc_checking_assert (!count
                       || VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)));

  /* Nothing to do if the existing embedded vector already has room.  */
  if (VAR_LOC_1PAUX (var)
      && VEC_space (loc_exp_dep, VAR_LOC_DEP_VEC (var), count))
    return;

  /* The dependency vector is embedded at the tail of onepart_aux.  */
  allocsize = offsetof (struct onepart_aux, deps)
              + VEC_embedded_size (loc_exp_dep, count);

  if (VAR_LOC_1PAUX (var))
    {
      VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
                                        VAR_LOC_1PAUX (var), allocsize);
      /* If the reallocation moves the onepaux structure, the
         back-pointer to BACKLINKS in the first list member will still
         point to its old location.  Adjust it.  */
      if (VAR_LOC_DEP_LST (var))
        VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
    }
  else
    {
      VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
      *VAR_LOC_DEP_LSTP (var) = NULL;
      VAR_LOC_FROM (var) = NULL;
      VAR_LOC_DEPTH (var) = 0;
    }
  VEC_embedded_init (loc_exp_dep, VAR_LOC_DEP_VEC (var), count);
}

|
/* Remove all entries from the vector of active dependencies of VAR,
   removing them from the back-links lists too.  */

static void
loc_exp_dep_clear (variable var)
{
  /* Pop entries one at a time; each must first be unlinked from the
     doubly-linked back-link list it is a member of, since other
     variables' aux structures point into this vector's storage.  */
  while (!VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)))
    {
      loc_exp_dep *led = VEC_last (loc_exp_dep, VAR_LOC_DEP_VEC (var));
      /* Unlink LED from its back-link list before the pop invalidates
	 its storage.  */
      if (led->next)
	led->next->pprev = led->pprev;
      if (led->pprev)
	*led->pprev = led->next;
      VEC_pop (loc_exp_dep, VAR_LOC_DEP_VEC (var));
    }
}
|
|
|
/* Insert an active dependency from VAR on X to the vector of
   dependencies, and add the corresponding back-link to X's list of
   back-links in VARS.  */

static void
loc_exp_insert_dep (variable var, rtx x, htab_t vars)
{
  decl_or_value dv;
  variable xvar;
  loc_exp_dep *led;

  dv = dv_from_rtx (x);

  /* ??? Build a vector of variables parallel to EXPANDING, to avoid
     an additional look up?  */
  xvar = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));

  if (!xvar)
    {
      /* X is not in VARS; fall back to the dropped-values table.
	 NO_INSERT: it is expected to already be there.  */
      xvar = variable_from_dropped (dv, NO_INSERT);
      gcc_checking_assert (xvar);
    }

  /* No point in adding the same backlink more than once.  This may
     arise if say the same value appears in two complex expressions in
     the same loc_list, or even more than once in a single
     expression.  */
  if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
    return;

  /* Reserve a slot in VAR's dependency vector, then fill it in place.
     The push must not reallocate (space was reserved up front by
     loc_exp_dep_alloc), since LED's address is stored in lists.  */
  VEC_quick_push (loc_exp_dep, VAR_LOC_DEP_VEC (var), NULL);
  led = VEC_last (loc_exp_dep, VAR_LOC_DEP_VEC (var));
  led->dv = var->dv;
  led->value = x;

  /* Link LED at the head of XVAR's back-link list, fixing up the
     previous head's back-pointer if there was one.  */
  loc_exp_dep_alloc (xvar, 0);
  led->pprev = VAR_LOC_DEP_LSTP (xvar);
  led->next = *led->pprev;
  if (led->next)
    led->next->pprev = &led->next;
  *led->pprev = led;
}
|
|
|
/* Create active dependencies of VAR on COUNT values starting at
   VALUE, and corresponding back-links to the entries in VARS.  Return
   true if we found any pending-recursion results.  */

static bool
loc_exp_dep_set (variable var, rtx result, rtx *value, int count, htab_t vars)
{
  bool pending_recursion = false;

  /* The dependency vector must have been cleared before re-expanding
     this variable.  */
  gcc_checking_assert (VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)));

  /* Set up all dependencies from last_child (as set up at the end of
     the loop above) to the end.  */
  loc_exp_dep_alloc (var, count);

  while (count--)
    {
      rtx x = *value++;

      /* A dependency is pending recursion only when the expansion as
	 a whole failed (RESULT is NULL) and X was recursed into.  */
      if (!pending_recursion)
	pending_recursion = !result && VALUE_RECURSED_INTO (x);

      loc_exp_insert_dep (var, x, vars);
    }

  return pending_recursion;
}
|
|
|
/* Notify the back-links of IVAR that are pending recursion that we
   have found a non-NIL value for it, so they are cleared for another
   attempt to compute a current location.  */

static void
notify_dependents_of_resolved_value (variable ivar, htab_t vars)
{
  loc_exp_dep *led, *next;

  /* Walk IVAR's back-link list; each LED is unlinked after being
     processed, so NEXT is latched before any modification.  */
  for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
    {
      decl_or_value dv = led->dv;
      variable var;

      next = led->next;

      if (dv_is_value_p (dv))
	{
	  rtx value = dv_as_value (dv);

	  /* If we have already resolved it, leave it alone.  */
	  if (!VALUE_RECURSED_INTO (value))
	    continue;

	  /* Check that VALUE_RECURSED_INTO, true from the test above,
	     implies NO_LOC_P.  */
	  gcc_checking_assert (NO_LOC_P (value));

	  /* We won't notify variables that are being expanded,
	     because their dependency list is cleared before
	     recursing.  */
	  NO_LOC_P (value) = false;
	  VALUE_RECURSED_INTO (value) = false;

	  gcc_checking_assert (dv_changed_p (dv));
	}
      else if (!dv_changed_p (dv))
	/* A non-value dependent that hasn't changed needs no
	   notification.  */
	continue;

      var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));

      if (!var)
	var = variable_from_dropped (dv, NO_INSERT);

      /* Recursively notify anything that depended on VAR in turn.  */
      if (var)
	notify_dependents_of_resolved_value (var, vars);

      /* Unlink LED from the back-link list and neutralize its
	 pointers, so a later loc_exp_dep_clear won't touch it.  */
      if (next)
	next->pprev = led->pprev;
      if (led->pprev)
	*led->pprev = next;
      led->next = NULL;
      led->pprev = NULL;
    }
}
|
|
|
static rtx vt_expand_loc_callback (rtx x, bitmap regs,
|
static rtx vt_expand_loc_callback (rtx x, bitmap regs,
|
int max_depth, void *data);
|
int max_depth, void *data);
|
|
|
/* Return the combined depth, when one sub-expression evaluated to
   BEST_DEPTH and the previous known depth was SAVED_DEPTH.  */

static inline int
update_depth (int saved_depth, int best_depth)
{
  /* A zero depth means nothing was found; the other operand stands.
     Note we do NOT add one extra level here: this computes the
     maximum depth among sub-expressions, and the caller increments
     it later if appropriate.  */
  if (best_depth == 0)
    return saved_depth;

  if (saved_depth == 0)
    return best_depth;

  /* Both depths are meaningful: keep the deeper of the two.  */
  return saved_depth < best_depth ? best_depth : saved_depth;
}
|
|
|
/* Expand VAR to a location RTX, updating its cur_loc.  Use REGS and
   DATA for cselib expand callback.  If PENDRECP is given, indicate in
   it whether any sub-expression couldn't be fully evaluated because
   it is pending recursion resolution.  */

static inline rtx
vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
{
  struct expand_loc_callback_data *elcd
    = (struct expand_loc_callback_data *) data;
  location_chain loc, next;
  rtx result = NULL;
  int first_child, result_first_child, last_child;
  bool pending_recursion;
  rtx loc_from = NULL;
  struct elt_loc_list *cloc = NULL;
  int depth = 0, saved_depth = elcd->depth;

  /* Clear all backlinks pointing at this, so that we're not notified
     while we're active.  */
  loc_exp_dep_clear (var);

  if (var->onepart == ONEPART_VALUE)
    {
      /* For VALUEs, also consider the cselib location list CLOC in
	 addition to VAR's own location chain.  */
      cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));

      gcc_checking_assert (cselib_preserved_value_p (val));

      cloc = val->locs;
    }

  /* Child-index bookkeeping into ELCD->expanding: FIRST_CHILD marks
     our starting point, RESULT_FIRST_CHILD..LAST_CHILD delimits the
     sub-expansions of the most recent attempt.  */
  first_child = result_first_child = last_child
    = VEC_length (rtx, elcd->expanding);

  /* Attempt to expand each available location in turn.  */
  for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
       loc || cloc; loc = next)
    {
      result_first_child = last_child;

      /* Draw from CLOC when the own chain is exhausted, or to try
	 cselib locations before an ENTRY_VALUE.  */
      if (!loc || (GET_CODE (loc->loc) == ENTRY_VALUE && cloc))
	{
	  loc_from = cloc->loc;
	  next = loc;
	  cloc = cloc->next;
	  if (unsuitable_loc (loc_from))
	    continue;
	}
      else
	{
	  loc_from = loc->loc;
	  next = loc->next;
	}

      gcc_checking_assert (!unsuitable_loc (loc_from));

      elcd->depth = 0;
      result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
					   vt_expand_loc_callback, data);
      last_child = VEC_length (rtx, elcd->expanding);

      if (result)
	{
	  depth = elcd->depth;

	  gcc_checking_assert (depth || result_first_child == last_child);

	  /* Count this expression as one level deeper than its
	     sub-expressions, unless it expanded a single child.  */
	  if (last_child - result_first_child != 1)
	    depth++;

	  /* Accept the expansion only if it is not too deep.  */
	  if (depth <= EXPR_USE_DEPTH)
	    break;

	  result = NULL;
	}

      /* Set it up in case we leave the loop.  */
      depth = 0;
      loc_from = NULL;
      result_first_child = first_child;
    }

  /* Register all encountered dependencies as active.  */
  pending_recursion = loc_exp_dep_set
    (var, result, VEC_address (rtx, elcd->expanding) + result_first_child,
     last_child - result_first_child, elcd->vars);

  /* Drop the sub-expansions we pushed; our caller keeps its own.  */
  VEC_truncate (rtx, elcd->expanding, first_child);

  /* Record where the expansion came from.  */
  gcc_checking_assert (!result || !pending_recursion);
  VAR_LOC_FROM (var) = loc_from;
  VAR_LOC_DEPTH (var) = depth;

  /* DEPTH is nonzero exactly when we have a RESULT.  */
  gcc_checking_assert (!depth == !result);

  elcd->depth = update_depth (saved_depth, depth);

  /* Indicate whether any of the dependencies are pending recursion
     resolution.  */
  if (pendrecp)
    *pendrecp = pending_recursion;

  /* Only commit CUR_LOC when not awaiting recursion resolution (a
     NULL PENDRECP means the caller does not track recursion, so the
     result is final).  */
  if (!pendrecp || !pending_recursion)
    var->var_part[0].cur_loc = result;

  return result;
}
|
|
|
/* Callback for cselib_expand_value, that looks for expressions
   holding the value in the var-tracking hash tables.  Return X for
   standard processing, anything else is to be used as-is.  */

static rtx
vt_expand_loc_callback (rtx x, bitmap regs,
			int max_depth ATTRIBUTE_UNUSED,
			void *data)
{
  struct expand_loc_callback_data *elcd
    = (struct expand_loc_callback_data *) data;
  decl_or_value dv;
  variable var;
  rtx result, subreg;
  bool pending_recursion = false;
  bool from_empty = false;

  switch (GET_CODE (x))
    {
    case SUBREG:
      /* Expand the inner expression first, then rebuild the subreg
	 around the expansion.  */
      subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
					   EXPR_DEPTH,
					   vt_expand_loc_callback, data);

      if (!subreg)
	return NULL;

      result = simplify_gen_subreg (GET_MODE (x), subreg,
				    GET_MODE (SUBREG_REG (x)),
				    SUBREG_BYTE (x));

      /* Invalid SUBREGs are ok in debug info.  ??? We could try
	 alternate expansions for the VALUE as well.  */
      if (!result)
	result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));

      return result;

    case DEBUG_EXPR:
    case VALUE:
      /* These are looked up in the var-tracking tables below.  */
      dv = dv_from_rtx (x);
      break;

    default:
      /* Anything else gets cselib's standard processing.  */
      return x;
    }

  /* Record X as a sub-expansion of whoever called us.  */
  VEC_safe_push (rtx, stack, elcd->expanding, x);

  /* Check that VALUE_RECURSED_INTO implies NO_LOC_P.  */
  gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));

  if (NO_LOC_P (x))
    {
      /* X is known (perhaps tentatively) to have no location.  */
      gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
      return NULL;
    }

  var = (variable) htab_find_with_hash (elcd->vars, dv, dv_htab_hash (dv));

  if (!var)
    {
      from_empty = true;
      var = variable_from_dropped (dv, INSERT);
    }

  gcc_checking_assert (var);

  if (!dv_changed_p (dv))
    {
      /* Unchanged: reuse the previously computed CUR_LOC and depth.  */
      gcc_checking_assert (!NO_LOC_P (x));
      gcc_checking_assert (var->var_part[0].cur_loc);
      gcc_checking_assert (VAR_LOC_1PAUX (var));
      gcc_checking_assert (VAR_LOC_1PAUX (var)->depth);

      elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);

      return var->var_part[0].cur_loc;
    }

  /* Mark X so that a recursive occurrence is detected (and yields
     NULL) while we expand its location chain.  */
  VALUE_RECURSED_INTO (x) = true;
  /* This is tentative, but it makes some tests simpler.  */
  NO_LOC_P (x) = true;

  gcc_checking_assert (var->n_var_parts == 1 || from_empty);

  result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);

  if (pending_recursion)
    {
      /* Leave X marked; it may be retried if a dependency gets a
	 location later (see resolve_expansions_pending_recursion).  */
      gcc_checking_assert (!result);
      VEC_safe_push (rtx, stack, elcd->pending, x);
    }
  else
    {
      /* Finalize: NO_LOC_P reflects whether we found a location.  */
      NO_LOC_P (x) = !result;
      VALUE_RECURSED_INTO (x) = false;
      set_dv_changed (dv, false);

      if (result)
	notify_dependents_of_resolved_value (var, elcd->vars);
    }

  return result;
}
|
|
|
/* While expanding variables, we may encounter recursion cycles
|
/* While expanding variables, we may encounter recursion cycles
|
because of mutual (possibly indirect) dependencies between two
|
because of mutual (possibly indirect) dependencies between two
|
particular variables (or values), say A and B. If we're trying to
|
particular variables (or values), say A and B. If we're trying to
|
expand A when we get to B, which in turn attempts to expand A, if
|
expand A when we get to B, which in turn attempts to expand A, if
|
we can't find any other expansion for B, we'll add B to this
|
we can't find any other expansion for B, we'll add B to this
|
pending-recursion stack, and tentatively return NULL for its
|
pending-recursion stack, and tentatively return NULL for its
|
location. This tentative value will be used for any other
|
location. This tentative value will be used for any other
|
occurrences of B, unless A gets some other location, in which case
|
occurrences of B, unless A gets some other location, in which case
|
it will notify B that it is worth another try at computing a
|
it will notify B that it is worth another try at computing a
|
location for it, and it will use the location computed for A then.
|
location for it, and it will use the location computed for A then.
|
At the end of the expansion, the tentative NULL locations become
|
At the end of the expansion, the tentative NULL locations become
|
final for all members of PENDING that didn't get a notification.
|
final for all members of PENDING that didn't get a notification.
|
This function performs this finalization of NULL locations. */
|
This function performs this finalization of NULL locations. */
|
|
|
static void
|
static void
|
resolve_expansions_pending_recursion (VEC (rtx, stack) *pending)
|
resolve_expansions_pending_recursion (VEC (rtx, stack) *pending)
|
{
|
{
|
while (!VEC_empty (rtx, pending))
|
while (!VEC_empty (rtx, pending))
|
{
|
{
|
rtx x = VEC_pop (rtx, pending);
|
rtx x = VEC_pop (rtx, pending);
|
decl_or_value dv;
|
decl_or_value dv;
|
|
|
if (!VALUE_RECURSED_INTO (x))
|
if (!VALUE_RECURSED_INTO (x))
|
continue;
|
continue;
|
|
|
gcc_checking_assert (NO_LOC_P (x));
|
gcc_checking_assert (NO_LOC_P (x));
|
VALUE_RECURSED_INTO (x) = false;
|
VALUE_RECURSED_INTO (x) = false;
|
dv = dv_from_rtx (x);
|
dv = dv_from_rtx (x);
|
gcc_checking_assert (dv_changed_p (dv));
|
gcc_checking_assert (dv_changed_p (dv));
|
set_dv_changed (dv, false);
|
set_dv_changed (dv, false);
|
}
|
}
|
}
|
}
|
|
|
/* Initialize expand_loc_callback_data D with variable hash table V.
   It must be a macro because of alloca (VEC stack): the stack-vector
   allocation has to happen in the caller's frame.  */
#define INIT_ELCD(d, v)						\
  do								\
    {								\
      (d).vars = (v);						\
      (d).expanding = VEC_alloc (rtx, stack, 4);		\
      (d).pending = VEC_alloc (rtx, stack, 4);			\
      (d).depth = 0;						\
    }								\
  while (0)
/* Finalize expand_loc_callback_data D, resolved to location L:
   finalize tentative NULL locations still pending recursion, release
   the stack vectors, and delegitimize L if it is a MEM address.  */
#define FINI_ELCD(d, l)						\
  do								\
    {								\
      resolve_expansions_pending_recursion ((d).pending);	\
      VEC_free (rtx, stack, (d).pending);			\
      VEC_free (rtx, stack, (d).expanding);			\
								\
      if ((l) && MEM_P (l))					\
	(l) = targetm.delegitimize_address (l);			\
    }								\
  while (0)
|
|
|
/* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
|
/* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
|
equivalences in VARS, updating their CUR_LOCs in the process. */
|
equivalences in VARS, updating their CUR_LOCs in the process. */
|
|
|
static rtx
|
static rtx
|
vt_expand_loc (rtx loc, htab_t vars)
|
vt_expand_loc (rtx loc, htab_t vars)
|
{
|
{
|
struct expand_loc_callback_data data;
|
struct expand_loc_callback_data data;
|
rtx result;
|
rtx result;
|
|
|
if (!MAY_HAVE_DEBUG_INSNS)
|
if (!MAY_HAVE_DEBUG_INSNS)
|
return loc;
|
return loc;
|
|
|
INIT_ELCD (data, vars);
|
INIT_ELCD (data, vars);
|
|
|
result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
|
result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
|
vt_expand_loc_callback, &data);
|
vt_expand_loc_callback, &data);
|
|
|
FINI_ELCD (data, result);
|
FINI_ELCD (data, result);
|
|
|
return result;
|
return result;
|
}
|
}
|
|
|
/* Expand the one-part VARiable to a location, using the equivalences
|
/* Expand the one-part VARiable to a location, using the equivalences
|
in VARS, updating their CUR_LOCs in the process. */
|
in VARS, updating their CUR_LOCs in the process. */
|
|
|
static rtx
|
static rtx
|
vt_expand_1pvar (variable var, htab_t vars)
|
vt_expand_1pvar (variable var, htab_t vars)
|
{
|
{
|
struct expand_loc_callback_data data;
|
struct expand_loc_callback_data data;
|
rtx loc;
|
rtx loc;
|
|
|
gcc_checking_assert (var->onepart && var->n_var_parts == 1);
|
gcc_checking_assert (var->onepart && var->n_var_parts == 1);
|
|
|
if (!dv_changed_p (var->dv))
|
if (!dv_changed_p (var->dv))
|
return var->var_part[0].cur_loc;
|
return var->var_part[0].cur_loc;
|
|
|
INIT_ELCD (data, vars);
|
INIT_ELCD (data, vars);
|
|
|
loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
|
loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
|
|
|
gcc_checking_assert (VEC_empty (rtx, data.expanding));
|
gcc_checking_assert (VEC_empty (rtx, data.expanding));
|
|
|
FINI_ELCD (data, loc);
|
FINI_ELCD (data, loc);
|
|
|
return loc;
|
return loc;
|
}
|
}
|
|
|
/* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
|
/* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
|
additional parameters: WHERE specifies whether the note shall be emitted
|
additional parameters: WHERE specifies whether the note shall be emitted
|
before or after instruction INSN. */
|
before or after instruction INSN. */
|
|
|
static int
emit_note_insn_var_location (void **varp, void *data)
{
  variable var = (variable) *varp;
  rtx insn = ((emit_note_data *)data)->insn;
  enum emit_note_where where = ((emit_note_data *)data)->where;
  htab_t vars = ((emit_note_data *)data)->vars;
  rtx note, note_vl;
  int i, j, n_var_parts;
  bool complete;
  enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
  HOST_WIDE_INT last_limit;
  tree type_size_unit;
  HOST_WIDE_INT offsets[MAX_VAR_PARTS];
  rtx loc[MAX_VAR_PARTS];
  tree decl;
  location_chain lc;

  /* Only whole decls reach this point; values and debug exprs are
     filtered out of changed_variables beforehand.  */
  gcc_checking_assert (var->onepart == NOT_ONEPART
		       || var->onepart == ONEPART_VDECL);

  decl = dv_as_decl (var->dv);

  complete = true;
  last_limit = 0;
  n_var_parts = 0;
  /* For multi-part variables, default each part's cur_loc to the head
     of its location chain if it has none yet.  */
  if (!var->onepart)
    for (i = 0; i < var->n_var_parts; i++)
      if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
	var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
  for (i = 0; i < var->n_var_parts; i++)
    {
      enum machine_mode mode, wider_mode;
      rtx loc2;
      HOST_WIDE_INT offset;

      if (i == 0 && var->onepart)
	{
	  gcc_checking_assert (var->n_var_parts == 1);
	  offset = 0;
	  initialized = VAR_INIT_STATUS_INITIALIZED;
	  /* One-part variables may refer to values; expand them into a
	     concrete location (may fail, yielding NULL).  */
	  loc2 = vt_expand_1pvar (var, vars);
	}
      else
	{
	  if (last_limit < VAR_PART_OFFSET (var, i))
	    {
	      /* A gap between covered byte ranges: the location
		 description is incomplete.  */
	      complete = false;
	      break;
	    }
	  else if (last_limit > VAR_PART_OFFSET (var, i))
	    /* This part was already covered by an earlier widened
	       part; skip it.  */
	    continue;
	  offset = VAR_PART_OFFSET (var, i);
	  if (!var->var_part[i].cur_loc)
	    {
	      complete = false;
	      continue;
	    }
	  /* Pick up the initialization status recorded for cur_loc in
	     the location chain.  */
	  for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
	    if (var->var_part[i].cur_loc == lc->loc)
	      {
		initialized = lc->init;
		break;
	      }
	  gcc_assert (lc);
	  loc2 = var->var_part[i].cur_loc;
	}

      offsets[n_var_parts] = offset;
      if (!loc2)
	{
	  complete = false;
	  continue;
	}
      loc[n_var_parts] = loc2;
      mode = GET_MODE (var->var_part[i].cur_loc);
      if (mode == VOIDmode && var->onepart)
	mode = DECL_MODE (decl);
      last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);

      /* Attempt to merge adjacent registers or memory.  */
      wider_mode = GET_MODE_WIDER_MODE (mode);
      for (j = i + 1; j < var->n_var_parts; j++)
	if (last_limit <= VAR_PART_OFFSET (var, j))
	  break;
      if (j < var->n_var_parts
	  && wider_mode != VOIDmode
	  && var->var_part[j].cur_loc
	  && mode == GET_MODE (var->var_part[j].cur_loc)
	  && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
	  && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
	  && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
	  && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
	{
	  rtx new_loc = NULL;

	  if (REG_P (loc[n_var_parts])
	      && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
	      == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
	      && end_hard_regno (mode, REGNO (loc[n_var_parts]))
	      == REGNO (loc2))
	    {
	      /* Two consecutive hard registers hold the adjacent
		 parts; try to express them as one wider register.
		 Which register is the low part depends on
		 endianness.  */
	      if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
		new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
					   mode, 0);
	      else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
		new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
	      if (new_loc)
		{
		  /* Only accept the widening if it stays in the same
		     register; otherwise fall back to separate
		     parts.  */
		  if (!REG_P (new_loc)
		      || REGNO (new_loc) != REGNO (loc[n_var_parts]))
		    new_loc = NULL;
		  else
		    REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
		}
	    }
	  else if (MEM_P (loc[n_var_parts])
		   && GET_CODE (XEXP (loc2, 0)) == PLUS
		   && REG_P (XEXP (XEXP (loc2, 0), 0))
		   && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
	    {
	      /* Two memory references at base+offset addresses that
		 are exactly GET_MODE_SIZE (mode) bytes apart can be
		 merged into one wider memory reference.  */
	      if ((REG_P (XEXP (loc[n_var_parts], 0))
		   && rtx_equal_p (XEXP (loc[n_var_parts], 0),
				   XEXP (XEXP (loc2, 0), 0))
		   && INTVAL (XEXP (XEXP (loc2, 0), 1))
		   == GET_MODE_SIZE (mode))
		  || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
		      && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
		      && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
				      XEXP (XEXP (loc2, 0), 0))
		      && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
		      + GET_MODE_SIZE (mode)
		      == INTVAL (XEXP (XEXP (loc2, 0), 1))))
		new_loc = adjust_address_nv (loc[n_var_parts],
					     wider_mode, 0);
	    }

	  if (new_loc)
	    {
	      /* The merge succeeded: record the wider location and
		 resume scanning after the absorbed part.  */
	      loc[n_var_parts] = new_loc;
	      mode = wider_mode;
	      last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
	      i = j;
	    }
	}
      ++n_var_parts;
    }
  type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
  /* If the parts do not cover the whole decl, the location is
     incomplete.  */
  if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
    complete = false;

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  /* Build the VAR_LOCATION payload: NULL location if incomplete, a
     single location (possibly wrapped with its offset) for one part,
     or a PARALLEL of offset-annotated locations for several parts.  */
  note_vl = NULL_RTX;
  if (!complete)
    note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
				    (int) initialized);
  else if (n_var_parts == 1)
    {
      rtx expr_list;

      if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
	expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
      else
	expr_list = loc[0];

      note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
				      (int) initialized);
    }
  else if (n_var_parts)
    {
      rtx parallel;

      for (i = 0; i < n_var_parts; i++)
	loc[i]
	  = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));

      parallel = gen_rtx_PARALLEL (VOIDmode,
				   gen_rtvec_v (n_var_parts, loc));
      note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
				      parallel, (int) initialized);
    }

  if (where != EMIT_NOTE_BEFORE_INSN)
    {
      note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
      if (where == EMIT_NOTE_AFTER_CALL_INSN)
	NOTE_DURING_CALL_P (note) = true;
    }
  else
    {
      /* Make sure that the call related notes come first.  */
      while (NEXT_INSN (insn)
	     && NOTE_P (insn)
	     && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
		  && NOTE_DURING_CALL_P (insn))
		 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
	insn = NEXT_INSN (insn);
      if (NOTE_P (insn)
	  && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
	       && NOTE_DURING_CALL_P (insn))
	      || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
	note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
      else
	note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
    }
  NOTE_VAR_LOCATION (note) = note_vl;

  /* The note has been emitted; the variable is no longer pending in
     changed_variables.  */
  set_dv_changed (var->dv, false);
  gcc_assert (var->in_changed_variables);
  var->in_changed_variables = false;
  htab_clear_slot (changed_variables, varp);

  /* Continue traversing the hash table.  */
  return 1;
}
|
|
|
/* While traversing changed_variables, push onto DATA (a stack of RTX
|
/* While traversing changed_variables, push onto DATA (a stack of RTX
|
values) entries that aren't user variables. */
|
values) entries that aren't user variables. */
|
|
|
static int
|
static int
|
values_to_stack (void **slot, void *data)
|
values_to_stack (void **slot, void *data)
|
{
|
{
|
VEC (rtx, stack) **changed_values_stack = (VEC (rtx, stack) **)data;
|
VEC (rtx, stack) **changed_values_stack = (VEC (rtx, stack) **)data;
|
variable var = (variable) *slot;
|
variable var = (variable) *slot;
|
|
|
if (var->onepart == ONEPART_VALUE)
|
if (var->onepart == ONEPART_VALUE)
|
VEC_safe_push (rtx, stack, *changed_values_stack, dv_as_value (var->dv));
|
VEC_safe_push (rtx, stack, *changed_values_stack, dv_as_value (var->dv));
|
else if (var->onepart == ONEPART_DEXPR)
|
else if (var->onepart == ONEPART_DEXPR)
|
VEC_safe_push (rtx, stack, *changed_values_stack,
|
VEC_safe_push (rtx, stack, *changed_values_stack,
|
DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
|
DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
|
|
|
return 1;
|
return 1;
|
}
|
}
|
|
|
/* Remove from changed_variables the entry whose DV corresponds to
|
/* Remove from changed_variables the entry whose DV corresponds to
|
value or debug_expr VAL. */
|
value or debug_expr VAL. */
|
static void
|
static void
|
remove_value_from_changed_variables (rtx val)
|
remove_value_from_changed_variables (rtx val)
|
{
|
{
|
decl_or_value dv = dv_from_rtx (val);
|
decl_or_value dv = dv_from_rtx (val);
|
void **slot;
|
void **slot;
|
variable var;
|
variable var;
|
|
|
slot = htab_find_slot_with_hash (changed_variables,
|
slot = htab_find_slot_with_hash (changed_variables,
|
dv, dv_htab_hash (dv), NO_INSERT);
|
dv, dv_htab_hash (dv), NO_INSERT);
|
var = (variable) *slot;
|
var = (variable) *slot;
|
var->in_changed_variables = false;
|
var->in_changed_variables = false;
|
htab_clear_slot (changed_variables, slot);
|
htab_clear_slot (changed_variables, slot);
|
}
|
}
|
|
|
/* If VAL (a value or debug_expr) has backlinks to variables actively
   dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
   changed, adding to CHANGED_VALUES_STACK any dependencies that may
   have dependencies of their own to notify.  */

static void
notify_dependents_of_changed_value (rtx val, htab_t htab,
				    VEC (rtx, stack) **changed_values_stack)
{
  void **slot;
  variable var;
  loc_exp_dep *led;
  decl_or_value dv = dv_from_rtx (val);

  /* Locate the entry carrying VAL's backlink list: it may still be
     pending in changed_variables, live in the current set HTAB, or
     have been dropped (dropped_values retains the backlinks).  */
  slot = htab_find_slot_with_hash (changed_variables,
				   dv, dv_htab_hash (dv), NO_INSERT);
  if (!slot)
    slot = htab_find_slot_with_hash (htab,
				     dv, dv_htab_hash (dv), NO_INSERT);
  if (!slot)
    slot = htab_find_slot_with_hash (dropped_values,
				     dv, dv_htab_hash (dv), NO_INSERT);
  var = (variable) *slot;

  /* Consume the entire backlink list, unlinking each entry as we
     notify its owner.  */
  while ((led = VAR_LOC_DEP_LST (var)))
    {
      decl_or_value ldv = led->dv;
      void **islot;
      variable ivar;

      /* Deactivate and remove the backlink, as it was “used up”.  It
	 makes no sense to attempt to notify the same entity again:
	 either it will be recomputed and re-register an active
	 dependency, or it will still have the changed mark.  */
      if (led->next)
	led->next->pprev = led->pprev;
      if (led->pprev)
	*led->pprev = led->next;
      led->next = NULL;
      led->pprev = NULL;

      /* Already marked changed; no need to notify LDV again.  */
      if (dv_changed_p (ldv))
	continue;

      switch (dv_onepart_p (ldv))
	{
	case ONEPART_VALUE:
	case ONEPART_DEXPR:
	  /* Values and debug exprs may themselves have dependents;
	     push them so their own backlinks get processed too.  */
	  set_dv_changed (ldv, true);
	  VEC_safe_push (rtx, stack, *changed_values_stack, dv_as_rtx (ldv));
	  break;

	default:
	  /* A variable: queue it directly for note emission.  */
	  islot = htab_find_slot_with_hash (htab, ldv, dv_htab_hash (ldv),
					    NO_INSERT);
	  ivar = (variable) *islot;
	  gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
	  variable_was_changed (ivar, NULL);
	  break;
	}
    }
}
|
|
|
/* Take out of changed_variables any entries that don't refer to use
   variables.  Back-propagate change notifications from values and
   debug_exprs to their active dependencies in HTAB or in
   CHANGED_VARIABLES.  */

static void
process_changed_values (htab_t htab)
{
  int i, n;
  rtx val;
  VEC (rtx, stack) *changed_values_stack = VEC_alloc (rtx, stack, 20);

  /* Move values from changed_variables to changed_values_stack.  */
  htab_traverse (changed_variables, values_to_stack, &changed_values_stack);

  /* Back-propagate change notifications in values while popping
     them from the stack.  I tracks the remaining stack depth; N
     counts how many of the remaining entries came from the original
     changed_variables traversal (newly notified values are pushed on
     top and are popped first, so I > N exactly while processing
     them).  */
  for (n = i = VEC_length (rtx, changed_values_stack);
       i > 0; i = VEC_length (rtx, changed_values_stack))
    {
      val = VEC_pop (rtx, changed_values_stack);
      /* May push further values onto the stack.  */
      notify_dependents_of_changed_value (val, htab, &changed_values_stack);

      /* This condition will hold when visiting each of the entries
	 originally in changed_variables.  We can't remove them
	 earlier because this could drop the backlinks before we got a
	 chance to use them.  */
      if (i == n)
	{
	  remove_value_from_changed_variables (val);
	  n--;
	}
    }

  VEC_free (rtx, stack, changed_values_stack);
}
|
|
|
/* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
|
/* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
|
CHANGED_VARIABLES and delete this chain. WHERE specifies whether
|
CHANGED_VARIABLES and delete this chain. WHERE specifies whether
|
the notes shall be emitted before of after instruction INSN. */
|
the notes shall be emitted before of after instruction INSN. */
|
|
|
static void
|
static void
|
emit_notes_for_changes (rtx insn, enum emit_note_where where,
|
emit_notes_for_changes (rtx insn, enum emit_note_where where,
|
shared_hash vars)
|
shared_hash vars)
|
{
|
{
|
emit_note_data data;
|
emit_note_data data;
|
htab_t htab = shared_hash_htab (vars);
|
htab_t htab = shared_hash_htab (vars);
|
|
|
if (!htab_elements (changed_variables))
|
if (!htab_elements (changed_variables))
|
return;
|
return;
|
|
|
if (MAY_HAVE_DEBUG_INSNS)
|
if (MAY_HAVE_DEBUG_INSNS)
|
process_changed_values (htab);
|
process_changed_values (htab);
|
|
|
data.insn = insn;
|
data.insn = insn;
|
data.where = where;
|
data.where = where;
|
data.vars = htab;
|
data.vars = htab;
|
|
|
htab_traverse (changed_variables, emit_note_insn_var_location, &data);
|
htab_traverse (changed_variables, emit_note_insn_var_location, &data);
|
}
|
}
|
|
|
/* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
   same variable in hash table DATA or is not there at all.  */

static int
emit_notes_for_differences_1 (void **slot, void *data)
{
  htab_t new_vars = (htab_t) data;
  variable old_var, new_var;

  old_var = (variable) *slot;
  new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
					    dv_htab_hash (old_var->dv));

  if (!new_var)
    {
      /* Variable has disappeared.  Queue an empty (zero-part)
	 variable so a location-unknown note gets emitted.  */
      variable empty_var = NULL;

      if (old_var->onepart == ONEPART_VALUE
	  || old_var->onepart == ONEPART_DEXPR)
	{
	  /* Reuse the entry already recorded in dropped_values, if
	     any, rather than allocating a fresh one.  */
	  empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
	  if (empty_var)
	    {
	      gcc_checking_assert (!empty_var->in_changed_variables);
	      if (!VAR_LOC_1PAUX (old_var))
		{
		  /* Move the auxiliary data onto OLD_VAR so the
		     common code below can hand it to EMPTY_VAR.  */
		  VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
		  VAR_LOC_1PAUX (empty_var) = NULL;
		}
	      else
		gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
	    }
	}

      if (!empty_var)
	{
	  /* No dropped entry to reuse: build a fresh zero-part
	     variable with the same identity.  */
	  empty_var = (variable) pool_alloc (onepart_pool (old_var->onepart));
	  empty_var->dv = old_var->dv;
	  empty_var->refcount = 0;
	  empty_var->n_var_parts = 0;
	  empty_var->onepart = old_var->onepart;
	  empty_var->in_changed_variables = false;
	}

      if (empty_var->onepart)
	{
	  /* Propagate the auxiliary data to (ultimately)
	     changed_variables.  */
	  empty_var->var_part[0].loc_chain = NULL;
	  empty_var->var_part[0].cur_loc = NULL;
	  VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
	  VAR_LOC_1PAUX (old_var) = NULL;
	}
      variable_was_changed (empty_var, NULL);
      /* Continue traversing the hash table.  */
      return 1;
    }
  /* Update cur_loc and one-part auxiliary data, before new_var goes
     through variable_was_changed.  */
  if (old_var != new_var && new_var->onepart)
    {
      gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
      VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
      VAR_LOC_1PAUX (old_var) = NULL;
      new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
    }
  if (variable_different_p (old_var, new_var))
    variable_was_changed (new_var, NULL);

  /* Continue traversing the hash table.  */
  return 1;
}
|
|
|
/* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
|
/* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
|
table DATA. */
|
table DATA. */
|
|
|
static int
|
static int
|
emit_notes_for_differences_2 (void **slot, void *data)
|
emit_notes_for_differences_2 (void **slot, void *data)
|
{
|
{
|
htab_t old_vars = (htab_t) data;
|
htab_t old_vars = (htab_t) data;
|
variable old_var, new_var;
|
variable old_var, new_var;
|
|
|
new_var = (variable) *slot;
|
new_var = (variable) *slot;
|
old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
|
old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
|
dv_htab_hash (new_var->dv));
|
dv_htab_hash (new_var->dv));
|
if (!old_var)
|
if (!old_var)
|
{
|
{
|
int i;
|
int i;
|
for (i = 0; i < new_var->n_var_parts; i++)
|
for (i = 0; i < new_var->n_var_parts; i++)
|
new_var->var_part[i].cur_loc = NULL;
|
new_var->var_part[i].cur_loc = NULL;
|
variable_was_changed (new_var, NULL);
|
variable_was_changed (new_var, NULL);
|
}
|
}
|
|
|
/* Continue traversing the hash table. */
|
/* Continue traversing the hash table. */
|
return 1;
|
return 1;
|
}
|
}
|
|
|
/* Emit notes before INSN for differences between dataflow sets OLD_SET and
|
/* Emit notes before INSN for differences between dataflow sets OLD_SET and
|
NEW_SET. */
|
NEW_SET. */
|
|
|
static void
|
static void
|
emit_notes_for_differences (rtx insn, dataflow_set *old_set,
|
emit_notes_for_differences (rtx insn, dataflow_set *old_set,
|
dataflow_set *new_set)
|
dataflow_set *new_set)
|
{
|
{
|
htab_traverse (shared_hash_htab (old_set->vars),
|
htab_traverse (shared_hash_htab (old_set->vars),
|
emit_notes_for_differences_1,
|
emit_notes_for_differences_1,
|
shared_hash_htab (new_set->vars));
|
shared_hash_htab (new_set->vars));
|
htab_traverse (shared_hash_htab (new_set->vars),
|
htab_traverse (shared_hash_htab (new_set->vars),
|
emit_notes_for_differences_2,
|
emit_notes_for_differences_2,
|
shared_hash_htab (old_set->vars));
|
shared_hash_htab (old_set->vars));
|
emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
|
emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
|
}
|
}
|
|
|
/* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
|
/* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
|
|
|
static rtx
|
static rtx
|
next_non_note_insn_var_location (rtx insn)
|
next_non_note_insn_var_location (rtx insn)
|
{
|
{
|
while (insn)
|
while (insn)
|
{
|
{
|
insn = NEXT_INSN (insn);
|
insn = NEXT_INSN (insn);
|
if (insn == 0
|
if (insn == 0
|
|| !NOTE_P (insn)
|
|| !NOTE_P (insn)
|
|| NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
|
|| NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
|
break;
|
break;
|
}
|
}
|
|
|
return insn;
|
return insn;
|
}
|
}
|
|
|
/* Emit the notes for changes of location parts in the basic block BB. */
|
/* Emit the notes for changes of location parts in the basic block BB. */
|
|
|
static void
|
static void
|
emit_notes_in_bb (basic_block bb, dataflow_set *set)
|
emit_notes_in_bb (basic_block bb, dataflow_set *set)
|
{
|
{
|
unsigned int i;
|
unsigned int i;
|
micro_operation *mo;
|
micro_operation *mo;
|
|
|
dataflow_set_clear (set);
|
dataflow_set_clear (set);
|
dataflow_set_copy (set, &VTI (bb)->in);
|
dataflow_set_copy (set, &VTI (bb)->in);
|
|
|
FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
|
FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
|
{
|
{
|
rtx insn = mo->insn;
|
rtx insn = mo->insn;
|
rtx next_insn = next_non_note_insn_var_location (insn);
|
rtx next_insn = next_non_note_insn_var_location (insn);
|
|
|
switch (mo->type)
|
switch (mo->type)
|
{
|
{
|
case MO_CALL:
|
case MO_CALL:
|
dataflow_set_clear_at_call (set);
|
dataflow_set_clear_at_call (set);
|
emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
|
emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
|
{
|
{
|
rtx arguments = mo->u.loc, *p = &arguments, note;
|
rtx arguments = mo->u.loc, *p = &arguments, note;
|
while (*p)
|
while (*p)
|
{
|
{
|
XEXP (XEXP (*p, 0), 1)
|
XEXP (XEXP (*p, 0), 1)
|
= vt_expand_loc (XEXP (XEXP (*p, 0), 1),
|
= vt_expand_loc (XEXP (XEXP (*p, 0), 1),
|
shared_hash_htab (set->vars));
|
shared_hash_htab (set->vars));
|
/* If expansion is successful, keep it in the list. */
|
/* If expansion is successful, keep it in the list. */
|
if (XEXP (XEXP (*p, 0), 1))
|
if (XEXP (XEXP (*p, 0), 1))
|
p = &XEXP (*p, 1);
|
p = &XEXP (*p, 1);
|
/* Otherwise, if the following item is data_value for it,
|
/* Otherwise, if the following item is data_value for it,
|
drop it too too. */
|
drop it too too. */
|
else if (XEXP (*p, 1)
|
else if (XEXP (*p, 1)
|
&& REG_P (XEXP (XEXP (*p, 0), 0))
|
&& REG_P (XEXP (XEXP (*p, 0), 0))
|
&& MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
|
&& MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
|
&& REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
|
&& REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
|
0))
|
0))
|
&& REGNO (XEXP (XEXP (*p, 0), 0))
|
&& REGNO (XEXP (XEXP (*p, 0), 0))
|
== REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
|
== REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
|
0), 0)))
|
0), 0)))
|
*p = XEXP (XEXP (*p, 1), 1);
|
*p = XEXP (XEXP (*p, 1), 1);
|
/* Just drop this item. */
|
/* Just drop this item. */
|
else
|
else
|
*p = XEXP (*p, 1);
|
*p = XEXP (*p, 1);
|
}
|
}
|
note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
|
note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
|
NOTE_VAR_LOCATION (note) = arguments;
|
NOTE_VAR_LOCATION (note) = arguments;
|
}
|
}
|
break;
|
break;
|
|
|
case MO_USE:
|
case MO_USE:
|
{
|
{
|
rtx loc = mo->u.loc;
|
rtx loc = mo->u.loc;
|
|
|
if (REG_P (loc))
|
if (REG_P (loc))
|
var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
|
var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
|
else
|
else
|
var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
|
var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
|
|
|
emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
|
emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
|
}
|
}
|
break;
|
break;
|
|
|
case MO_VAL_LOC:
|
case MO_VAL_LOC:
|
{
|
{
|
rtx loc = mo->u.loc;
|
rtx loc = mo->u.loc;
|
rtx val, vloc;
|
rtx val, vloc;
|
tree var;
|
tree var;
|
|
|
if (GET_CODE (loc) == CONCAT)
|
if (GET_CODE (loc) == CONCAT)
|
{
|
{
|
val = XEXP (loc, 0);
|
val = XEXP (loc, 0);
|
vloc = XEXP (loc, 1);
|
vloc = XEXP (loc, 1);
|
}
|
}
|
else
|
else
|
{
|
{
|
val = NULL_RTX;
|
val = NULL_RTX;
|
vloc = loc;
|
vloc = loc;
|
}
|
}
|
|
|
var = PAT_VAR_LOCATION_DECL (vloc);
|
var = PAT_VAR_LOCATION_DECL (vloc);
|
|
|
clobber_variable_part (set, NULL_RTX,
|
clobber_variable_part (set, NULL_RTX,
|
dv_from_decl (var), 0, NULL_RTX);
|
dv_from_decl (var), 0, NULL_RTX);
|
if (val)
|
if (val)
|
{
|
{
|
if (VAL_NEEDS_RESOLUTION (loc))
|
if (VAL_NEEDS_RESOLUTION (loc))
|
val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
|
val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
|
set_variable_part (set, val, dv_from_decl (var), 0,
|
set_variable_part (set, val, dv_from_decl (var), 0,
|
VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
|
VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
|
INSERT);
|
INSERT);
|
}
|
}
|
else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
|
else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
|
set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
|
set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
|
dv_from_decl (var), 0,
|
dv_from_decl (var), 0,
|
VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
|
VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
|
INSERT);
|
INSERT);
|
|
|
emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
|
emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
|
}
|
}
|
break;
|
break;
|
|
|
case MO_VAL_USE:
|
case MO_VAL_USE:
|
{
|
{
|
rtx loc = mo->u.loc;
|
rtx loc = mo->u.loc;
|
rtx val, vloc, uloc;
|
rtx val, vloc, uloc;
|
|
|
vloc = uloc = XEXP (loc, 1);
|
vloc = uloc = XEXP (loc, 1);
|
val = XEXP (loc, 0);
|
val = XEXP (loc, 0);
|
|
|
if (GET_CODE (val) == CONCAT)
|
if (GET_CODE (val) == CONCAT)
|
{
|
{
|
uloc = XEXP (val, 1);
|
uloc = XEXP (val, 1);
|
val = XEXP (val, 0);
|
val = XEXP (val, 0);
|
}
|
}
|
|
|
if (VAL_NEEDS_RESOLUTION (loc))
|
if (VAL_NEEDS_RESOLUTION (loc))
|
val_resolve (set, val, vloc, insn);
|
val_resolve (set, val, vloc, insn);
|
else
|
else
|
val_store (set, val, uloc, insn, false);
|
val_store (set, val, uloc, insn, false);
|
|
|
if (VAL_HOLDS_TRACK_EXPR (loc))
|
if (VAL_HOLDS_TRACK_EXPR (loc))
|
{
|
{
|
if (GET_CODE (uloc) == REG)
|
if (GET_CODE (uloc) == REG)
|
var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
|
var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
|
NULL);
|
NULL);
|
else if (GET_CODE (uloc) == MEM)
|
else if (GET_CODE (uloc) == MEM)
|
var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
|
var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
|
NULL);
|
NULL);
|
}
|
}
|
|
|
emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
|
emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
|
}
|
}
|
break;
|
break;
|
|
|
case MO_VAL_SET:
|
case MO_VAL_SET:
|
{
|
{
|
rtx loc = mo->u.loc;
|
rtx loc = mo->u.loc;
|
rtx val, vloc, uloc;
|
rtx val, vloc, uloc;
|
|
|
vloc = loc;
|
vloc = loc;
|
uloc = XEXP (vloc, 1);
|
uloc = XEXP (vloc, 1);
|
val = XEXP (vloc, 0);
|
val = XEXP (vloc, 0);
|
vloc = uloc;
|
vloc = uloc;
|
|
|
if (GET_CODE (val) == CONCAT)
|
if (GET_CODE (val) == CONCAT)
|
{
|
{
|
vloc = XEXP (val, 1);
|
vloc = XEXP (val, 1);
|
val = XEXP (val, 0);
|
val = XEXP (val, 0);
|
}
|
}
|
|
|
if (GET_CODE (vloc) == SET)
|
if (GET_CODE (vloc) == SET)
|
{
|
{
|
rtx vsrc = SET_SRC (vloc);
|
rtx vsrc = SET_SRC (vloc);
|
|
|
gcc_assert (val != vsrc);
|
gcc_assert (val != vsrc);
|
gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
|
gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
|
|
|
vloc = SET_DEST (vloc);
|
vloc = SET_DEST (vloc);
|
|
|
if (VAL_NEEDS_RESOLUTION (loc))
|
if (VAL_NEEDS_RESOLUTION (loc))
|
val_resolve (set, val, vsrc, insn);
|
val_resolve (set, val, vsrc, insn);
|
}
|
}
|
else if (VAL_NEEDS_RESOLUTION (loc))
|
else if (VAL_NEEDS_RESOLUTION (loc))
|
{
|
{
|
gcc_assert (GET_CODE (uloc) == SET
|
gcc_assert (GET_CODE (uloc) == SET
|
&& GET_CODE (SET_SRC (uloc)) == REG);
|
&& GET_CODE (SET_SRC (uloc)) == REG);
|
val_resolve (set, val, SET_SRC (uloc), insn);
|
val_resolve (set, val, SET_SRC (uloc), insn);
|
}
|
}
|
|
|
if (VAL_HOLDS_TRACK_EXPR (loc))
|
if (VAL_HOLDS_TRACK_EXPR (loc))
|
{
|
{
|
if (VAL_EXPR_IS_CLOBBERED (loc))
|
if (VAL_EXPR_IS_CLOBBERED (loc))
|
{
|
{
|
if (REG_P (uloc))
|
if (REG_P (uloc))
|
var_reg_delete (set, uloc, true);
|
var_reg_delete (set, uloc, true);
|
else if (MEM_P (uloc))
|
else if (MEM_P (uloc))
|
var_mem_delete (set, uloc, true);
|
var_mem_delete (set, uloc, true);
|
}
|
}
|
else
|
else
|
{
|
{
|
bool copied_p = VAL_EXPR_IS_COPIED (loc);
|
bool copied_p = VAL_EXPR_IS_COPIED (loc);
|
rtx set_src = NULL;
|
rtx set_src = NULL;
|
enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
|
enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
|
|
|
if (GET_CODE (uloc) == SET)
|
if (GET_CODE (uloc) == SET)
|
{
|
{
|
set_src = SET_SRC (uloc);
|
set_src = SET_SRC (uloc);
|
uloc = SET_DEST (uloc);
|
uloc = SET_DEST (uloc);
|
}
|
}
|
|
|
if (copied_p)
|
if (copied_p)
|
{
|
{
|
status = find_src_status (set, set_src);
|
status = find_src_status (set, set_src);
|
|
|
set_src = find_src_set_src (set, set_src);
|
set_src = find_src_set_src (set, set_src);
|
}
|
}
|
|
|
if (REG_P (uloc))
|
if (REG_P (uloc))
|
var_reg_delete_and_set (set, uloc, !copied_p,
|
var_reg_delete_and_set (set, uloc, !copied_p,
|
status, set_src);
|
status, set_src);
|
else if (MEM_P (uloc))
|
else if (MEM_P (uloc))
|
var_mem_delete_and_set (set, uloc, !copied_p,
|
var_mem_delete_and_set (set, uloc, !copied_p,
|
status, set_src);
|
status, set_src);
|
}
|
}
|
}
|
}
|
else if (REG_P (uloc))
|
else if (REG_P (uloc))
|
var_regno_delete (set, REGNO (uloc));
|
var_regno_delete (set, REGNO (uloc));
|
|
|
val_store (set, val, vloc, insn, true);
|
val_store (set, val, vloc, insn, true);
|
|
|
emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
|
emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
|
set->vars);
|
set->vars);
|
}
|
}
|
break;
|
break;
|
|
|
case MO_SET:
|
case MO_SET:
|
{
|
{
|
rtx loc = mo->u.loc;
|
rtx loc = mo->u.loc;
|
rtx set_src = NULL;
|
rtx set_src = NULL;
|
|
|
if (GET_CODE (loc) == SET)
|
if (GET_CODE (loc) == SET)
|
{
|
{
|
set_src = SET_SRC (loc);
|
set_src = SET_SRC (loc);
|
loc = SET_DEST (loc);
|
loc = SET_DEST (loc);
|
}
|
}
|
|
|
if (REG_P (loc))
|
if (REG_P (loc))
|
var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
|
var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
|
set_src);
|
set_src);
|
else
|
else
|
var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
|
var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
|
set_src);
|
set_src);
|
|
|
emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
|
emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
|
set->vars);
|
set->vars);
|
}
|
}
|
break;
|
break;
|
|
|
case MO_COPY:
|
case MO_COPY:
|
{
|
{
|
rtx loc = mo->u.loc;
|
rtx loc = mo->u.loc;
|
enum var_init_status src_status;
|
enum var_init_status src_status;
|
rtx set_src = NULL;
|
rtx set_src = NULL;
|
|
|
if (GET_CODE (loc) == SET)
|
if (GET_CODE (loc) == SET)
|
{
|
{
|
set_src = SET_SRC (loc);
|
set_src = SET_SRC (loc);
|
loc = SET_DEST (loc);
|
loc = SET_DEST (loc);
|
}
|
}
|
|
|
src_status = find_src_status (set, set_src);
|
src_status = find_src_status (set, set_src);
|
set_src = find_src_set_src (set, set_src);
|
set_src = find_src_set_src (set, set_src);
|
|
|
if (REG_P (loc))
|
if (REG_P (loc))
|
var_reg_delete_and_set (set, loc, false, src_status, set_src);
|
var_reg_delete_and_set (set, loc, false, src_status, set_src);
|
else
|
else
|
var_mem_delete_and_set (set, loc, false, src_status, set_src);
|
var_mem_delete_and_set (set, loc, false, src_status, set_src);
|
|
|
emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
|
emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
|
set->vars);
|
set->vars);
|
}
|
}
|
break;
|
break;
|
|
|
case MO_USE_NO_VAR:
|
case MO_USE_NO_VAR:
|
{
|
{
|
rtx loc = mo->u.loc;
|
rtx loc = mo->u.loc;
|
|
|
if (REG_P (loc))
|
if (REG_P (loc))
|
var_reg_delete (set, loc, false);
|
var_reg_delete (set, loc, false);
|
else
|
else
|
var_mem_delete (set, loc, false);
|
var_mem_delete (set, loc, false);
|
|
|
emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
|
emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
|
}
|
}
|
break;
|
break;
|
|
|
case MO_CLOBBER:
|
case MO_CLOBBER:
|
{
|
{
|
rtx loc = mo->u.loc;
|
rtx loc = mo->u.loc;
|
|
|
if (REG_P (loc))
|
if (REG_P (loc))
|
var_reg_delete (set, loc, true);
|
var_reg_delete (set, loc, true);
|
else
|
else
|
var_mem_delete (set, loc, true);
|
var_mem_delete (set, loc, true);
|
|
|
emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
|
emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
|
set->vars);
|
set->vars);
|
}
|
}
|
break;
|
break;
|
|
|
case MO_ADJUST:
|
case MO_ADJUST:
|
set->stack_adjust += mo->u.adjust;
|
set->stack_adjust += mo->u.adjust;
|
break;
|
break;
|
}
|
}
|
}
|
}
|
}
|
}
|
|
|
/* Emit notes for the whole function.  Walks all basic blocks in layout
   order, maintaining a running dataflow set CUR, and emits
   NOTE_INSN_VAR_LOCATION notes for every change of variable location.
   Frees the per-block in/out sets as soon as they are no longer needed.  */

static void
vt_emit_notes (void)
{
  basic_block bb;
  dataflow_set cur;

  /* No variable-change notifications may be pending before we start.  */
  gcc_assert (!htab_elements (changed_variables));

  /* Free memory occupied by the out hash tables, as they aren't used
     anymore.  */
  FOR_EACH_BB (bb)
    dataflow_set_clear (&VTI (bb)->out);

  /* Enable emitting notes by functions (mainly by set_variable_part and
     delete_variable_part).  */
  emit_notes = true;

  /* Values dropped from the current set are remembered here so later
     uses can still be resolved; only needed when debug insns exist.  */
  if (MAY_HAVE_DEBUG_INSNS)
    dropped_values = htab_create (cselib_get_next_uid () * 2,
                                  variable_htab_hash, variable_htab_eq,
                                  variable_htab_free);

  dataflow_set_init (&cur);

  FOR_EACH_BB (bb)
    {
      /* Emit the notes for changes of variable locations between two
         subsequent basic blocks.  */
      emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);

      /* Emit the notes for the changes in the basic block itself.  */
      emit_notes_in_bb (bb, &cur);

      /* Free memory occupied by the in hash table, we won't need it
         again.  */
      dataflow_set_clear (&VTI (bb)->in);
    }
#ifdef ENABLE_CHECKING
  /* At the end of the last block CUR should have converged back to the
     empty set; verify that by diffing against empty_shared_hash.  */
  htab_traverse (shared_hash_htab (cur.vars),
                 emit_notes_for_differences_1,
                 shared_hash_htab (empty_shared_hash));
#endif
  dataflow_set_destroy (&cur);

  if (MAY_HAVE_DEBUG_INSNS)
    htab_delete (dropped_values);

  /* Disable note emission again for the rest of the pass.  */
  emit_notes = false;
}
|
|
|
/* If there is a declaration and offset associated with register/memory RTL
|
/* If there is a declaration and offset associated with register/memory RTL
|
assign declaration to *DECLP and offset to *OFFSETP, and return true. */
|
assign declaration to *DECLP and offset to *OFFSETP, and return true. */
|
|
|
static bool
|
static bool
|
vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
|
vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
|
{
|
{
|
if (REG_P (rtl))
|
if (REG_P (rtl))
|
{
|
{
|
if (REG_ATTRS (rtl))
|
if (REG_ATTRS (rtl))
|
{
|
{
|
*declp = REG_EXPR (rtl);
|
*declp = REG_EXPR (rtl);
|
*offsetp = REG_OFFSET (rtl);
|
*offsetp = REG_OFFSET (rtl);
|
return true;
|
return true;
|
}
|
}
|
}
|
}
|
else if (MEM_P (rtl))
|
else if (MEM_P (rtl))
|
{
|
{
|
if (MEM_ATTRS (rtl))
|
if (MEM_ATTRS (rtl))
|
{
|
{
|
*declp = MEM_EXPR (rtl);
|
*declp = MEM_EXPR (rtl);
|
*offsetp = INT_MEM_OFFSET (rtl);
|
*offsetp = INT_MEM_OFFSET (rtl);
|
return true;
|
return true;
|
}
|
}
|
}
|
}
|
return false;
|
return false;
|
}
|
}
|
|
|
/* Record the value for the ENTRY_VALUE of RTL as a global equivalence
|
/* Record the value for the ENTRY_VALUE of RTL as a global equivalence
|
of VAL. */
|
of VAL. */
|
|
|
static void
|
static void
|
record_entry_value (cselib_val *val, rtx rtl)
|
record_entry_value (cselib_val *val, rtx rtl)
|
{
|
{
|
rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
|
rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
|
|
|
ENTRY_VALUE_EXP (ev) = rtl;
|
ENTRY_VALUE_EXP (ev) = rtl;
|
|
|
cselib_add_permanent_equiv (val, ev, get_insns ());
|
cselib_add_permanent_equiv (val, ev, get_insns ());
|
}
|
}
|
|
|
/* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK.  */

static void
vt_add_function_parameter (tree parm)
{
  rtx decl_rtl = DECL_RTL_IF_SET (parm);
  rtx incoming = DECL_INCOMING_RTL (parm);
  tree decl;
  enum machine_mode mode;
  HOST_WIDE_INT offset;
  dataflow_set *out;
  decl_or_value dv;

  /* Only genuine PARM_DECLs with both an assigned RTL and a known
     incoming location can be tracked.  */
  if (TREE_CODE (parm) != PARM_DECL)
    return;

  if (!decl_rtl || !incoming)
    return;

  /* BLKmode parameters have no single tractable location.  */
  if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
    return;

  /* If there is a DRAP register, rewrite the incoming location of parameters
     passed on the stack into MEMs based on the argument pointer, as the DRAP
     register can be reused for other purposes and we do not track locations
     based on generic registers.  But the prerequisite is that this argument
     pointer be also the virtual CFA pointer, see vt_initialize.  */
  if (MEM_P (incoming)
      && stack_realign_drap
      && arg_pointer_rtx == cfa_base_rtx
      && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
          || (GET_CODE (XEXP (incoming, 0)) == PLUS
              && XEXP (XEXP (incoming, 0), 0)
                 == crtl->args.internal_arg_pointer
              && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
    {
      HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
      if (GET_CODE (XEXP (incoming, 0)) == PLUS)
        off += INTVAL (XEXP (XEXP (incoming, 0), 1));
      incoming
        = replace_equiv_address_nv (incoming,
                                    plus_constant (arg_pointer_rtx, off));
    }

#ifdef HAVE_window_save
  /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
     If the target machine has an explicit window save instruction, the
     actual entry value is the corresponding OUTGOING_REGNO instead.
     Record each rewritten register in windowed_parm_regs so notes can
     later be fixed up.  */
  if (REG_P (incoming)
      && HARD_REGISTER_P (incoming)
      && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
    {
      parm_reg_t *p
        = VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, NULL);
      p->incoming = incoming;
      incoming
        = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
                              OUTGOING_REGNO (REGNO (incoming)), 0);
      p->outgoing = incoming;
    }
  else if (MEM_P (incoming)
           && REG_P (XEXP (incoming, 0))
           && HARD_REGISTER_P (XEXP (incoming, 0)))
    {
      /* Same treatment for a stack slot addressed via a windowed
         register.  */
      rtx reg = XEXP (incoming, 0);
      if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
        {
          parm_reg_t *p
            = VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, NULL);
          p->incoming = reg;
          reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
          p->outgoing = reg;
          incoming = replace_equiv_address_nv (incoming, reg);
        }
    }
#endif

  if (!vt_get_decl_and_offset (incoming, &decl, &offset))
    {
      if (REG_P (incoming) || MEM_P (incoming))
        {
          /* This means argument is passed by invisible reference.  */
          offset = 0;
          decl = parm;
          incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
        }
      else
        {
          /* Fall back to DECL_RTL and adjust the offset for the lowpart
             of the incoming mode within the declared mode.  */
          if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
            return;
          offset += byte_lowpart_offset (GET_MODE (incoming),
                                         GET_MODE (decl_rtl));
        }
    }

  if (!decl)
    return;

  if (parm != decl)
    {
      /* Assume that DECL_RTL was a pseudo that got spilled to
         memory.  The spill slot sharing code will force the
         memory to reference spill_slot_decl (%sfp), so we don't
         match above.  That's ok, the pseudo must have referenced
         the entire parameter, so just reset OFFSET.  */
      gcc_assert (decl == get_spill_slot_decl (false));
      offset = 0;
    }

  if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
    return;

  out = &VTI (ENTRY_BLOCK_PTR)->out;

  dv = dv_from_decl (parm);

  if (target_for_debug_bind (parm)
      /* We can't deal with these right now, because this kind of
         variable is single-part.  ??? We could handle parallels
         that describe multiple locations for the same single
         value, but ATM we don't.  */
      && GET_CODE (incoming) != PARALLEL)
    {
      cselib_val *val;

      /* ??? We shouldn't ever hit this, but it may happen because
         arguments passed by invisible reference aren't dealt with
         above: incoming-rtl will have Pmode rather than the
         expected mode for the type.  */
      if (offset)
        return;

      val = cselib_lookup_from_insn (var_lowpart (mode, incoming), mode, true,
                                     VOIDmode, get_insns ());

      /* ??? Float-typed values in memory are not handled by
         cselib.  */
      if (val)
        {
          preserve_value (val);
          set_variable_part (out, val->val_rtx, dv, offset,
                             VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
          /* From here on track the parameter through its VALUE rather
             than its decl.  */
          dv = dv_from_value (val->val_rtx);
        }
    }

  if (REG_P (incoming))
    {
      incoming = var_lowpart (mode, incoming);
      gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
      attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
                         incoming);
      set_variable_part (out, incoming, dv, offset,
                         VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
      if (dv_is_value_p (dv))
        {
          record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
          /* For an integral reference parameter, also record the entry
             value of the pointed-to object.  */
          if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
              && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
            {
              enum machine_mode indmode
                = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
              rtx mem = gen_rtx_MEM (indmode, incoming);
              cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
                                                         VOIDmode,
                                                         get_insns ());
              if (val)
                {
                  preserve_value (val);
                  record_entry_value (val, mem);
                  set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
                                     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
                }
            }
        }
    }
  else if (MEM_P (incoming))
    {
      incoming = var_lowpart (mode, incoming);
      set_variable_part (out, incoming, dv, offset,
                         VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
    }
}
|
|
|
/* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
|
/* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
|
|
|
static void
|
static void
|
vt_add_function_parameters (void)
|
vt_add_function_parameters (void)
|
{
|
{
|
tree parm;
|
tree parm;
|
|
|
for (parm = DECL_ARGUMENTS (current_function_decl);
|
for (parm = DECL_ARGUMENTS (current_function_decl);
|
parm; parm = DECL_CHAIN (parm))
|
parm; parm = DECL_CHAIN (parm))
|
vt_add_function_parameter (parm);
|
vt_add_function_parameter (parm);
|
|
|
if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
|
if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
|
{
|
{
|
tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
|
tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
|
|
|
if (TREE_CODE (vexpr) == INDIRECT_REF)
|
if (TREE_CODE (vexpr) == INDIRECT_REF)
|
vexpr = TREE_OPERAND (vexpr, 0);
|
vexpr = TREE_OPERAND (vexpr, 0);
|
|
|
if (TREE_CODE (vexpr) == PARM_DECL
|
if (TREE_CODE (vexpr) == PARM_DECL
|
&& DECL_ARTIFICIAL (vexpr)
|
&& DECL_ARTIFICIAL (vexpr)
|
&& !DECL_IGNORED_P (vexpr)
|
&& !DECL_IGNORED_P (vexpr)
|
&& DECL_NAMELESS (vexpr))
|
&& DECL_NAMELESS (vexpr))
|
vt_add_function_parameter (vexpr);
|
vt_add_function_parameter (vexpr);
|
}
|
}
|
}
|
}
|
|
|
/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
|
/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
|
|
|
static bool
|
static bool
|
fp_setter (rtx insn)
|
fp_setter (rtx insn)
|
{
|
{
|
rtx pat = PATTERN (insn);
|
rtx pat = PATTERN (insn);
|
if (RTX_FRAME_RELATED_P (insn))
|
if (RTX_FRAME_RELATED_P (insn))
|
{
|
{
|
rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
|
rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
|
if (expr)
|
if (expr)
|
pat = XEXP (expr, 0);
|
pat = XEXP (expr, 0);
|
}
|
}
|
if (GET_CODE (pat) == SET)
|
if (GET_CODE (pat) == SET)
|
return SET_DEST (pat) == hard_frame_pointer_rtx;
|
return SET_DEST (pat) == hard_frame_pointer_rtx;
|
else if (GET_CODE (pat) == PARALLEL)
|
else if (GET_CODE (pat) == PARALLEL)
|
{
|
{
|
int i;
|
int i;
|
for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
|
for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
|
if (GET_CODE (XVECEXP (pat, 0, i)) == SET
|
if (GET_CODE (XVECEXP (pat, 0, i)) == SET
|
&& SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
|
&& SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
|
return true;
|
return true;
|
}
|
}
|
return false;
|
return false;
|
}
|
}
|
|
|
/* Initialize cfa_base_rtx, create a preserved VALUE for it and
   ensure it isn't flushed during cselib_reset_table.
   Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
   has been eliminated.  */

static void
vt_init_cfa_base (void)
{
  cselib_val *val;

  /* Pick the register that plays the role of the virtual CFA pointer
     on this target, and the offset from it to the CFA.  */
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_base_rtx = frame_pointer_rtx;
  cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_base_rtx = arg_pointer_rtx;
  cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif
  /* A non-fixed base (or one aliasing the hard frame pointer) can be
     reused for other purposes, so we cannot rely on it.  */
  if (cfa_base_rtx == hard_frame_pointer_rtx
      || !fixed_regs[REGNO (cfa_base_rtx)])
    {
      cfa_base_rtx = NULL_RTX;
      return;
    }
  if (!MAY_HAVE_DEBUG_INSNS)
    return;

  /* Tell alias analysis that cfa_base_rtx should share
     find_base_term value with stack pointer or hard frame pointer.  */
  if (!frame_pointer_needed)
    vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
  else if (!crtl->stack_realign_tried)
    vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);

  /* Create a VALUE for the base register, preserve it across cselib
     table resets, and seed the ENTRY_BLOCK out set with it.  */
  val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
                                 VOIDmode, get_insns ());
  preserve_value (val);
  cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
  var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx,
                    VAR_INIT_STATUS_INITIALIZED, dv_from_value (val->val_rtx),
                    0, NULL_RTX, INSERT);
}
|
|
|
/* Allocate and initialize the data structures for variable tracking
|
/* Allocate and initialize the data structures for variable tracking
|
and parse the RTL to get the micro operations. */
|
and parse the RTL to get the micro operations. */
|
|
|
static bool
|
static bool
|
vt_initialize (void)
|
vt_initialize (void)
|
{
|
{
|
basic_block bb, prologue_bb = single_succ (ENTRY_BLOCK_PTR);
|
basic_block bb, prologue_bb = single_succ (ENTRY_BLOCK_PTR);
|
HOST_WIDE_INT fp_cfa_offset = -1;
|
HOST_WIDE_INT fp_cfa_offset = -1;
|
|
|
alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
|
alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
|
|
|
attrs_pool = create_alloc_pool ("attrs_def pool",
|
attrs_pool = create_alloc_pool ("attrs_def pool",
|
sizeof (struct attrs_def), 1024);
|
sizeof (struct attrs_def), 1024);
|
var_pool = create_alloc_pool ("variable_def pool",
|
var_pool = create_alloc_pool ("variable_def pool",
|
sizeof (struct variable_def)
|
sizeof (struct variable_def)
|
+ (MAX_VAR_PARTS - 1)
|
+ (MAX_VAR_PARTS - 1)
|
* sizeof (((variable)NULL)->var_part[0]), 64);
|
* sizeof (((variable)NULL)->var_part[0]), 64);
|
loc_chain_pool = create_alloc_pool ("location_chain_def pool",
|
loc_chain_pool = create_alloc_pool ("location_chain_def pool",
|
sizeof (struct location_chain_def),
|
sizeof (struct location_chain_def),
|
1024);
|
1024);
|
shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
|
shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
|
sizeof (struct shared_hash_def), 256);
|
sizeof (struct shared_hash_def), 256);
|
empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
|
empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
|
empty_shared_hash->refcount = 1;
|
empty_shared_hash->refcount = 1;
|
empty_shared_hash->htab
|
empty_shared_hash->htab
|
= htab_create (1, variable_htab_hash, variable_htab_eq,
|
= htab_create (1, variable_htab_hash, variable_htab_eq,
|
variable_htab_free);
|
variable_htab_free);
|
changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
|
changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
|
variable_htab_free);
|
variable_htab_free);
|
|
|
/* Init the IN and OUT sets. */
|
/* Init the IN and OUT sets. */
|
FOR_ALL_BB (bb)
|
FOR_ALL_BB (bb)
|
{
|
{
|
VTI (bb)->visited = false;
|
VTI (bb)->visited = false;
|
VTI (bb)->flooded = false;
|
VTI (bb)->flooded = false;
|
dataflow_set_init (&VTI (bb)->in);
|
dataflow_set_init (&VTI (bb)->in);
|
dataflow_set_init (&VTI (bb)->out);
|
dataflow_set_init (&VTI (bb)->out);
|
VTI (bb)->permp = NULL;
|
VTI (bb)->permp = NULL;
|
}
|
}
|
|
|
if (MAY_HAVE_DEBUG_INSNS)
|
if (MAY_HAVE_DEBUG_INSNS)
|
{
|
{
|
cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
|
cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
|
scratch_regs = BITMAP_ALLOC (NULL);
|
scratch_regs = BITMAP_ALLOC (NULL);
|
valvar_pool = create_alloc_pool ("small variable_def pool",
|
valvar_pool = create_alloc_pool ("small variable_def pool",
|
sizeof (struct variable_def), 256);
|
sizeof (struct variable_def), 256);
|
preserved_values = VEC_alloc (rtx, heap, 256);
|
preserved_values = VEC_alloc (rtx, heap, 256);
|
}
|
}
|
else
|
else
|
{
|
{
|
scratch_regs = NULL;
|
scratch_regs = NULL;
|
valvar_pool = NULL;
|
valvar_pool = NULL;
|
}
|
}
|
|
|
/* In order to factor out the adjustments made to the stack pointer or to
|
/* In order to factor out the adjustments made to the stack pointer or to
|
the hard frame pointer and thus be able to use DW_OP_fbreg operations
|
the hard frame pointer and thus be able to use DW_OP_fbreg operations
|
instead of individual location lists, we're going to rewrite MEMs based
|
instead of individual location lists, we're going to rewrite MEMs based
|
on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
|
on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
|
or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
|
or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
|
resp. arg_pointer_rtx. We can do this either when there is no frame
|
resp. arg_pointer_rtx. We can do this either when there is no frame
|
pointer in the function and stack adjustments are consistent for all
|
pointer in the function and stack adjustments are consistent for all
|
basic blocks or when there is a frame pointer and no stack realignment.
|
basic blocks or when there is a frame pointer and no stack realignment.
|
But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
|
But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
|
has been eliminated. */
|
has been eliminated. */
|
if (!frame_pointer_needed)
|
if (!frame_pointer_needed)
|
{
|
{
|
rtx reg, elim;
|
rtx reg, elim;
|
|
|
if (!vt_stack_adjustments ())
|
if (!vt_stack_adjustments ())
|
return false;
|
return false;
|
|
|
#ifdef FRAME_POINTER_CFA_OFFSET
|
#ifdef FRAME_POINTER_CFA_OFFSET
|
reg = frame_pointer_rtx;
|
reg = frame_pointer_rtx;
|
#else
|
#else
|
reg = arg_pointer_rtx;
|
reg = arg_pointer_rtx;
|
#endif
|
#endif
|
elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
|
elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
|
if (elim != reg)
|
if (elim != reg)
|
{
|
{
|
if (GET_CODE (elim) == PLUS)
|
if (GET_CODE (elim) == PLUS)
|
elim = XEXP (elim, 0);
|
elim = XEXP (elim, 0);
|
if (elim == stack_pointer_rtx)
|
if (elim == stack_pointer_rtx)
|
vt_init_cfa_base ();
|
vt_init_cfa_base ();
|
}
|
}
|
}
|
}
|
else if (!crtl->stack_realign_tried)
|
else if (!crtl->stack_realign_tried)
|
{
|
{
|
rtx reg, elim;
|
rtx reg, elim;
|
|
|
#ifdef FRAME_POINTER_CFA_OFFSET
|
#ifdef FRAME_POINTER_CFA_OFFSET
|
reg = frame_pointer_rtx;
|
reg = frame_pointer_rtx;
|
fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
|
fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
|
#else
|
#else
|
reg = arg_pointer_rtx;
|
reg = arg_pointer_rtx;
|
fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
|
fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
|
#endif
|
#endif
|
elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
|
elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
|
if (elim != reg)
|
if (elim != reg)
|
{
|
{
|
if (GET_CODE (elim) == PLUS)
|
if (GET_CODE (elim) == PLUS)
|
{
|
{
|
fp_cfa_offset -= INTVAL (XEXP (elim, 1));
|
fp_cfa_offset -= INTVAL (XEXP (elim, 1));
|
elim = XEXP (elim, 0);
|
elim = XEXP (elim, 0);
|
}
|
}
|
if (elim != hard_frame_pointer_rtx)
|
if (elim != hard_frame_pointer_rtx)
|
fp_cfa_offset = -1;
|
fp_cfa_offset = -1;
|
}
|
}
|
else
|
else
|
fp_cfa_offset = -1;
|
fp_cfa_offset = -1;
|
}
|
}
|
|
|
/* If the stack is realigned and a DRAP register is used, we're going to
|
/* If the stack is realigned and a DRAP register is used, we're going to
|
rewrite MEMs based on it representing incoming locations of parameters
|
rewrite MEMs based on it representing incoming locations of parameters
|
passed on the stack into MEMs based on the argument pointer. Although
|
passed on the stack into MEMs based on the argument pointer. Although
|
we aren't going to rewrite other MEMs, we still need to initialize the
|
we aren't going to rewrite other MEMs, we still need to initialize the
|
virtual CFA pointer in order to ensure that the argument pointer will
|
virtual CFA pointer in order to ensure that the argument pointer will
|
be seen as a constant throughout the function.
|
be seen as a constant throughout the function.
|
|
|
??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
|
??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
|
else if (stack_realign_drap)
|
else if (stack_realign_drap)
|
{
|
{
|
rtx reg, elim;
|
rtx reg, elim;
|
|
|
#ifdef FRAME_POINTER_CFA_OFFSET
|
#ifdef FRAME_POINTER_CFA_OFFSET
|
reg = frame_pointer_rtx;
|
reg = frame_pointer_rtx;
|
#else
|
#else
|
reg = arg_pointer_rtx;
|
reg = arg_pointer_rtx;
|
#endif
|
#endif
|
elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
|
elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
|
if (elim != reg)
|
if (elim != reg)
|
{
|
{
|
if (GET_CODE (elim) == PLUS)
|
if (GET_CODE (elim) == PLUS)
|
elim = XEXP (elim, 0);
|
elim = XEXP (elim, 0);
|
if (elim == hard_frame_pointer_rtx)
|
if (elim == hard_frame_pointer_rtx)
|
vt_init_cfa_base ();
|
vt_init_cfa_base ();
|
}
|
}
|
}
|
}
|
|
|
hard_frame_pointer_adjustment = -1;
|
hard_frame_pointer_adjustment = -1;
|
|
|
vt_add_function_parameters ();
|
vt_add_function_parameters ();
|
|
|
FOR_EACH_BB (bb)
|
FOR_EACH_BB (bb)
|
{
|
{
|
rtx insn;
|
rtx insn;
|
HOST_WIDE_INT pre, post = 0;
|
HOST_WIDE_INT pre, post = 0;
|
basic_block first_bb, last_bb;
|
basic_block first_bb, last_bb;
|
|
|
if (MAY_HAVE_DEBUG_INSNS)
|
if (MAY_HAVE_DEBUG_INSNS)
|
{
|
{
|
cselib_record_sets_hook = add_with_sets;
|
cselib_record_sets_hook = add_with_sets;
|
if (dump_file && (dump_flags & TDF_DETAILS))
|
if (dump_file && (dump_flags & TDF_DETAILS))
|
fprintf (dump_file, "first value: %i\n",
|
fprintf (dump_file, "first value: %i\n",
|
cselib_get_next_uid ());
|
cselib_get_next_uid ());
|
}
|
}
|
|
|
first_bb = bb;
|
first_bb = bb;
|
for (;;)
|
for (;;)
|
{
|
{
|
edge e;
|
edge e;
|
if (bb->next_bb == EXIT_BLOCK_PTR
|
if (bb->next_bb == EXIT_BLOCK_PTR
|
|| ! single_pred_p (bb->next_bb))
|
|| ! single_pred_p (bb->next_bb))
|
break;
|
break;
|
e = find_edge (bb, bb->next_bb);
|
e = find_edge (bb, bb->next_bb);
|
if (! e || (e->flags & EDGE_FALLTHRU) == 0)
|
if (! e || (e->flags & EDGE_FALLTHRU) == 0)
|
break;
|
break;
|
bb = bb->next_bb;
|
bb = bb->next_bb;
|
}
|
}
|
last_bb = bb;
|
last_bb = bb;
|
|
|
/* Add the micro-operations to the vector. */
|
/* Add the micro-operations to the vector. */
|
FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
|
FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
|
{
|
{
|
HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
|
HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
|
VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
|
VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
|
for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
|
for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
|
insn = NEXT_INSN (insn))
|
insn = NEXT_INSN (insn))
|
{
|
{
|
if (INSN_P (insn))
|
if (INSN_P (insn))
|
{
|
{
|
if (!frame_pointer_needed)
|
if (!frame_pointer_needed)
|
{
|
{
|
insn_stack_adjust_offset_pre_post (insn, &pre, &post);
|
insn_stack_adjust_offset_pre_post (insn, &pre, &post);
|
if (pre)
|
if (pre)
|
{
|
{
|
micro_operation mo;
|
micro_operation mo;
|
mo.type = MO_ADJUST;
|
mo.type = MO_ADJUST;
|
mo.u.adjust = pre;
|
mo.u.adjust = pre;
|
mo.insn = insn;
|
mo.insn = insn;
|
if (dump_file && (dump_flags & TDF_DETAILS))
|
if (dump_file && (dump_flags & TDF_DETAILS))
|
log_op_type (PATTERN (insn), bb, insn,
|
log_op_type (PATTERN (insn), bb, insn,
|
MO_ADJUST, dump_file);
|
MO_ADJUST, dump_file);
|
VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
|
VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
|
&mo);
|
&mo);
|
VTI (bb)->out.stack_adjust += pre;
|
VTI (bb)->out.stack_adjust += pre;
|
}
|
}
|
}
|
}
|
|
|
cselib_hook_called = false;
|
cselib_hook_called = false;
|
adjust_insn (bb, insn);
|
adjust_insn (bb, insn);
|
if (MAY_HAVE_DEBUG_INSNS)
|
if (MAY_HAVE_DEBUG_INSNS)
|
{
|
{
|
if (CALL_P (insn))
|
if (CALL_P (insn))
|
prepare_call_arguments (bb, insn);
|
prepare_call_arguments (bb, insn);
|
cselib_process_insn (insn);
|
cselib_process_insn (insn);
|
if (dump_file && (dump_flags & TDF_DETAILS))
|
if (dump_file && (dump_flags & TDF_DETAILS))
|
{
|
{
|
print_rtl_single (dump_file, insn);
|
print_rtl_single (dump_file, insn);
|
dump_cselib_table (dump_file);
|
dump_cselib_table (dump_file);
|
}
|
}
|
}
|
}
|
if (!cselib_hook_called)
|
if (!cselib_hook_called)
|
add_with_sets (insn, 0, 0);
|
add_with_sets (insn, 0, 0);
|
cancel_changes (0);
|
cancel_changes (0);
|
|
|
if (!frame_pointer_needed && post)
|
if (!frame_pointer_needed && post)
|
{
|
{
|
micro_operation mo;
|
micro_operation mo;
|
mo.type = MO_ADJUST;
|
mo.type = MO_ADJUST;
|
mo.u.adjust = post;
|
mo.u.adjust = post;
|
mo.insn = insn;
|
mo.insn = insn;
|
if (dump_file && (dump_flags & TDF_DETAILS))
|
if (dump_file && (dump_flags & TDF_DETAILS))
|
log_op_type (PATTERN (insn), bb, insn,
|
log_op_type (PATTERN (insn), bb, insn,
|
MO_ADJUST, dump_file);
|
MO_ADJUST, dump_file);
|
VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
|
VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
|
&mo);
|
&mo);
|
VTI (bb)->out.stack_adjust += post;
|
VTI (bb)->out.stack_adjust += post;
|
}
|
}
|
|
|
if (bb == prologue_bb
|
if (bb == prologue_bb
|
&& fp_cfa_offset != -1
|
&& fp_cfa_offset != -1
|
&& hard_frame_pointer_adjustment == -1
|
&& hard_frame_pointer_adjustment == -1
|
&& RTX_FRAME_RELATED_P (insn)
|
&& RTX_FRAME_RELATED_P (insn)
|
&& fp_setter (insn))
|
&& fp_setter (insn))
|
{
|
{
|
vt_init_cfa_base ();
|
vt_init_cfa_base ();
|
hard_frame_pointer_adjustment = fp_cfa_offset;
|
hard_frame_pointer_adjustment = fp_cfa_offset;
|
}
|
}
|
}
|
}
|
}
|
}
|
gcc_assert (offset == VTI (bb)->out.stack_adjust);
|
gcc_assert (offset == VTI (bb)->out.stack_adjust);
|
}
|
}
|
|
|
bb = last_bb;
|
bb = last_bb;
|
|
|
if (MAY_HAVE_DEBUG_INSNS)
|
if (MAY_HAVE_DEBUG_INSNS)
|
{
|
{
|
cselib_preserve_only_values ();
|
cselib_preserve_only_values ();
|
cselib_reset_table (cselib_get_next_uid ());
|
cselib_reset_table (cselib_get_next_uid ());
|
cselib_record_sets_hook = NULL;
|
cselib_record_sets_hook = NULL;
|
}
|
}
|
}
|
}
|
|
|
hard_frame_pointer_adjustment = -1;
|
hard_frame_pointer_adjustment = -1;
|
VTI (ENTRY_BLOCK_PTR)->flooded = true;
|
VTI (ENTRY_BLOCK_PTR)->flooded = true;
|
cfa_base_rtx = NULL_RTX;
|
cfa_base_rtx = NULL_RTX;
|
return true;
|
return true;
|
}
|
}
|
|
|
/* This is *not* reset after each function.  It gives each
   NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
   a unique label number.  */

static int debug_label_num = 1;
|
|
|
/* Get rid of all debug insns from the insn stream. */
|
/* Get rid of all debug insns from the insn stream. */
|
|
|
static void
|
static void
|
delete_debug_insns (void)
|
delete_debug_insns (void)
|
{
|
{
|
basic_block bb;
|
basic_block bb;
|
rtx insn, next;
|
rtx insn, next;
|
|
|
if (!MAY_HAVE_DEBUG_INSNS)
|
if (!MAY_HAVE_DEBUG_INSNS)
|
return;
|
return;
|
|
|
FOR_EACH_BB (bb)
|
FOR_EACH_BB (bb)
|
{
|
{
|
FOR_BB_INSNS_SAFE (bb, insn, next)
|
FOR_BB_INSNS_SAFE (bb, insn, next)
|
if (DEBUG_INSN_P (insn))
|
if (DEBUG_INSN_P (insn))
|
{
|
{
|
tree decl = INSN_VAR_LOCATION_DECL (insn);
|
tree decl = INSN_VAR_LOCATION_DECL (insn);
|
if (TREE_CODE (decl) == LABEL_DECL
|
if (TREE_CODE (decl) == LABEL_DECL
|
&& DECL_NAME (decl)
|
&& DECL_NAME (decl)
|
&& !DECL_RTL_SET_P (decl))
|
&& !DECL_RTL_SET_P (decl))
|
{
|
{
|
PUT_CODE (insn, NOTE);
|
PUT_CODE (insn, NOTE);
|
NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
|
NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
|
NOTE_DELETED_LABEL_NAME (insn)
|
NOTE_DELETED_LABEL_NAME (insn)
|
= IDENTIFIER_POINTER (DECL_NAME (decl));
|
= IDENTIFIER_POINTER (DECL_NAME (decl));
|
SET_DECL_RTL (decl, insn);
|
SET_DECL_RTL (decl, insn);
|
CODE_LABEL_NUMBER (insn) = debug_label_num++;
|
CODE_LABEL_NUMBER (insn) = debug_label_num++;
|
}
|
}
|
else
|
else
|
delete_insn (insn);
|
delete_insn (insn);
|
}
|
}
|
}
|
}
|
}
|
}
|
|
|
/* Run a fast, BB-local only version of var tracking, to take care of
|
/* Run a fast, BB-local only version of var tracking, to take care of
|
information that we don't do global analysis on, such that not all
|
information that we don't do global analysis on, such that not all
|
information is lost. If SKIPPED holds, we're skipping the global
|
information is lost. If SKIPPED holds, we're skipping the global
|
pass entirely, so we should try to use information it would have
|
pass entirely, so we should try to use information it would have
|
handled as well.. */
|
handled as well.. */
|
|
|
static void
|
static void
|
vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
|
vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
|
{
|
{
|
/* ??? Just skip it all for now. */
|
/* ??? Just skip it all for now. */
|
delete_debug_insns ();
|
delete_debug_insns ();
|
}
|
}
|
|
|
/* Free the data structures needed for variable tracking. */
|
/* Free the data structures needed for variable tracking. */
|
|
|
static void
|
static void
|
vt_finalize (void)
|
vt_finalize (void)
|
{
|
{
|
basic_block bb;
|
basic_block bb;
|
|
|
FOR_EACH_BB (bb)
|
FOR_EACH_BB (bb)
|
{
|
{
|
VEC_free (micro_operation, heap, VTI (bb)->mos);
|
VEC_free (micro_operation, heap, VTI (bb)->mos);
|
}
|
}
|
|
|
FOR_ALL_BB (bb)
|
FOR_ALL_BB (bb)
|
{
|
{
|
dataflow_set_destroy (&VTI (bb)->in);
|
dataflow_set_destroy (&VTI (bb)->in);
|
dataflow_set_destroy (&VTI (bb)->out);
|
dataflow_set_destroy (&VTI (bb)->out);
|
if (VTI (bb)->permp)
|
if (VTI (bb)->permp)
|
{
|
{
|
dataflow_set_destroy (VTI (bb)->permp);
|
dataflow_set_destroy (VTI (bb)->permp);
|
XDELETE (VTI (bb)->permp);
|
XDELETE (VTI (bb)->permp);
|
}
|
}
|
}
|
}
|
free_aux_for_blocks ();
|
free_aux_for_blocks ();
|
htab_delete (empty_shared_hash->htab);
|
htab_delete (empty_shared_hash->htab);
|
htab_delete (changed_variables);
|
htab_delete (changed_variables);
|
free_alloc_pool (attrs_pool);
|
free_alloc_pool (attrs_pool);
|
free_alloc_pool (var_pool);
|
free_alloc_pool (var_pool);
|
free_alloc_pool (loc_chain_pool);
|
free_alloc_pool (loc_chain_pool);
|
free_alloc_pool (shared_hash_pool);
|
free_alloc_pool (shared_hash_pool);
|
|
|
if (MAY_HAVE_DEBUG_INSNS)
|
if (MAY_HAVE_DEBUG_INSNS)
|
{
|
{
|
free_alloc_pool (valvar_pool);
|
free_alloc_pool (valvar_pool);
|
VEC_free (rtx, heap, preserved_values);
|
VEC_free (rtx, heap, preserved_values);
|
cselib_finish ();
|
cselib_finish ();
|
BITMAP_FREE (scratch_regs);
|
BITMAP_FREE (scratch_regs);
|
scratch_regs = NULL;
|
scratch_regs = NULL;
|
}
|
}
|
|
|
#ifdef HAVE_window_save
|
#ifdef HAVE_window_save
|
VEC_free (parm_reg_t, gc, windowed_parm_regs);
|
VEC_free (parm_reg_t, gc, windowed_parm_regs);
|
#endif
|
#endif
|
|
|
if (vui_vec)
|
if (vui_vec)
|
XDELETEVEC (vui_vec);
|
XDELETEVEC (vui_vec);
|
vui_vec = NULL;
|
vui_vec = NULL;
|
vui_allocated = 0;
|
vui_allocated = 0;
|
}
|
}
|
|
|
/* The entry point to variable tracking pass. */
|
/* The entry point to variable tracking pass. */
|
|
|
static inline unsigned int
|
static inline unsigned int
|
variable_tracking_main_1 (void)
|
variable_tracking_main_1 (void)
|
{
|
{
|
bool success;
|
bool success;
|
|
|
if (flag_var_tracking_assignments < 0)
|
if (flag_var_tracking_assignments < 0)
|
{
|
{
|
delete_debug_insns ();
|
delete_debug_insns ();
|
return 0;
|
return 0;
|
}
|
}
|
|
|
if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
|
if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
|
{
|
{
|
vt_debug_insns_local (true);
|
vt_debug_insns_local (true);
|
return 0;
|
return 0;
|
}
|
}
|
|
|
mark_dfs_back_edges ();
|
mark_dfs_back_edges ();
|
if (!vt_initialize ())
|
if (!vt_initialize ())
|
{
|
{
|
vt_finalize ();
|
vt_finalize ();
|
vt_debug_insns_local (true);
|
vt_debug_insns_local (true);
|
return 0;
|
return 0;
|
}
|
}
|
|
|
success = vt_find_locations ();
|
success = vt_find_locations ();
|
|
|
if (!success && flag_var_tracking_assignments > 0)
|
if (!success && flag_var_tracking_assignments > 0)
|
{
|
{
|
vt_finalize ();
|
vt_finalize ();
|
|
|
delete_debug_insns ();
|
delete_debug_insns ();
|
|
|
/* This is later restored by our caller. */
|
/* This is later restored by our caller. */
|
flag_var_tracking_assignments = 0;
|
flag_var_tracking_assignments = 0;
|
|
|
success = vt_initialize ();
|
success = vt_initialize ();
|
gcc_assert (success);
|
gcc_assert (success);
|
|
|
success = vt_find_locations ();
|
success = vt_find_locations ();
|
}
|
}
|
|
|
if (!success)
|
if (!success)
|
{
|
{
|
vt_finalize ();
|
vt_finalize ();
|
vt_debug_insns_local (false);
|
vt_debug_insns_local (false);
|
return 0;
|
return 0;
|
}
|
}
|
|
|
if (dump_file && (dump_flags & TDF_DETAILS))
|
if (dump_file && (dump_flags & TDF_DETAILS))
|
{
|
{
|
dump_dataflow_sets ();
|
dump_dataflow_sets ();
|
dump_flow_info (dump_file, dump_flags);
|
dump_flow_info (dump_file, dump_flags);
|
}
|
}
|
|
|
timevar_push (TV_VAR_TRACKING_EMIT);
|
timevar_push (TV_VAR_TRACKING_EMIT);
|
vt_emit_notes ();
|
vt_emit_notes ();
|
timevar_pop (TV_VAR_TRACKING_EMIT);
|
timevar_pop (TV_VAR_TRACKING_EMIT);
|
|
|
vt_finalize ();
|
vt_finalize ();
|
vt_debug_insns_local (false);
|
vt_debug_insns_local (false);
|
return 0;
|
return 0;
|
}
|
}
|
|
|
unsigned int
|
unsigned int
|
variable_tracking_main (void)
|
variable_tracking_main (void)
|
{
|
{
|
unsigned int ret;
|
unsigned int ret;
|
int save = flag_var_tracking_assignments;
|
int save = flag_var_tracking_assignments;
|
|
|
ret = variable_tracking_main_1 ();
|
ret = variable_tracking_main_1 ();
|
|
|
flag_var_tracking_assignments = save;
|
flag_var_tracking_assignments = save;
|
|
|
return ret;
|
return ret;
|
}
|
}
|
|
|
static bool
|
static bool
|
gate_handle_var_tracking (void)
|
gate_handle_var_tracking (void)
|
{
|
{
|
return (flag_var_tracking && !targetm.delay_vartrack);
|
return (flag_var_tracking && !targetm.delay_vartrack);
|
}
|
}
|
|
|
|
|
|
|
struct rtl_opt_pass pass_variable_tracking =
|
struct rtl_opt_pass pass_variable_tracking =
|
{
|
{
|
{
|
{
|
RTL_PASS,
|
RTL_PASS,
|
"vartrack", /* name */
|
"vartrack", /* name */
|
gate_handle_var_tracking, /* gate */
|
gate_handle_var_tracking, /* gate */
|
variable_tracking_main, /* execute */
|
variable_tracking_main, /* execute */
|
NULL, /* sub */
|
NULL, /* sub */
|
NULL, /* next */
|
NULL, /* next */
|
0, /* static_pass_number */
|
0, /* static_pass_number */
|
TV_VAR_TRACKING, /* tv_id */
|
TV_VAR_TRACKING, /* tv_id */
|
0, /* properties_required */
|
0, /* properties_required */
|
0, /* properties_provided */
|
0, /* properties_provided */
|
0, /* properties_destroyed */
|
0, /* properties_destroyed */
|
0, /* todo_flags_start */
|
0, /* todo_flags_start */
|
TODO_verify_rtl_sharing /* todo_flags_finish */
|
TODO_verify_rtl_sharing /* todo_flags_finish */
|
}
|
}
|
};
|
};
|
|
|