/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "basic-block.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "diagnostic-core.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "timevar.h"
#include "except.h"
#include "target.h"
#include "params.h"
#include "rtlhooks-def.h"
#include "tree-pass.h"
#include "df.h"
#include "dbgcnt.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `REG_QTY (N)' records what quantity register N is currently thought
   of as containing.
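
   As an editorial illustration (not part of the original comment),
   assume pseudo registers 100 and 101 and consider three insns:

       (set (reg:SI 100) (mem:SI ...))    ;; new value: fresh REG_QTY (100)
       (set (reg:SI 101) (reg:SI 100))    ;; copy: REG_QTY (101) = REG_QTY (100)
       (set (reg:SI 100) (plus:SI ...))   ;; new value: fresh REG_QTY (100)

   After the third insn, register 100 has a new quantity while register
   101 still carries the old one, so the two are no longer treated as
   equivalent.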

   All real quantity numbers are greater than or equal to zero.
   If register N has not been assigned a quantity, `REG_QTY (N)' will
   equal -N - 1, which is always negative.

   Quantity numbers below zero do not exist and none of the `qty_table'
   entries should be referenced with a negative index.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, REG expressions with
   the qty_table `mode' must be in the hash table for both registers and
   must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one of the
   registers does not have the same mode as those expressions.

Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, then, except in special cases,
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   `REG_TICK' and `REG_IN_TABLE', accessors for members of
   cse_reg_info, are used to detect this case.  REG_TICK (i) is
   incremented whenever a value is stored in register i.
   REG_IN_TABLE (i) holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value REG_TICK (i)
   had when the references were entered.  If we want to enter a
   reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
   remove old references.  Until we want to enter a new entry, the
   mere fact that the two vectors don't match causes the entries to be
   ignored if anyone tries to match them.
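
   As an editorial sketch (not part of the original comment), the check
   described above is applied roughly like this before register i is
   entered in the table again:

       if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
         remove_invalid_refs (i);
       REG_IN_TABLE (i) = REG_TICK (i);

   so the stale entries are flushed only at the point where a new entry
   for register i is actually wanted.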

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, `REG_TICK' and
   `REG_IN_TABLE' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
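
/* Editorial example (not part of the original source): when a constant
   such as (const (plus (symbol_ref "x") (const_int 4))) is entered in
   the table, the related expression (symbol_ref "x") -- the same
   constant with no additive term -- is entered as well, and their
   `related_value' fields link the two so that `use_related_value' can
   find a register holding one starting from an element for the other.  */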

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A sum of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;
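
/* Editorial sketch (not part of the original source): given a register N
   whose quantity is valid, the recorded constant, if any, is reached as

     struct qty_table_elem *ent = &qty_table[REG_QTY (N)];
     if (ent->const_rtx != NULL_RTX)
       ... use ent->const_rtx, interpreted in mode ent->mode ...

   which is only meaningful when REG_QTY (N) >= 0.  */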

/* Structure used to pass arguments via for_each_rtx to function
   cse_change_cc_mode.  */
struct change_cc_mode_args
{
  rtx insn;
  rtx newreg;
};

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the current and last value assigned to CC0.
   If it should happen to be a constant, it is stored in preference
   to the actual assigned value.  In case it is a constant, we store
   the mode in which the constant should be interpreted.  */

static rtx this_insn_cc0, prev_insn_cc0;
static enum machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;
#endif

/* Insn being scanned.  */

static rtx this_insn;
static bool optimize_this_for_speed_p;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;
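
/* Editorial sketch (not part of the original source): assuming register N
   has a valid quantity (REG_QTY (N) >= 0), every register currently
   equivalent to it can be visited by following the chain rooted in the
   quantity entry:

     int q = REG_QTY (N);
     int r = qty_table[q].first_reg;
     while (r >= 0)
       {
         ... register r currently holds quantity q ...
         r = reg_eqv_table[r].next;
       }
*/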

struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A table of cse_reg_info indexed by register numbers.  */
static struct cse_reg_info *cse_reg_info_table;

/* The size of the above table.  */
static unsigned int cse_reg_info_table_size;

/* The index of the first entry that has not been initialized.  */
static unsigned int cse_reg_info_table_first_uninitialized;

/* The timestamp at the beginning of the current run of
   cse_extended_basic_block.  We increment this variable at the beginning
   of each such run.  The timestamp field of a cse_reg_info entry matches
   the value of this variable if and only if the entry has been
   initialized during the current run of cse_extended_basic_block.  */
static unsigned int cse_reg_info_timestamp;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* True if CSE has altered the CFG.  */
static bool cse_cfg_altered;

/* True if CSE has altered conditional jump insns in such a way
   that jump optimization should be redone.  */
static bool cse_jumps_altered;

/* True if we put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL_OPERAND; in that case we have to rerun jump
   after CSE to put in the note.  */
static bool recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The canon_exp field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT      5
#define HASH_SIZE       (1 << HASH_SHIFT)
#define HASH_MASK       (HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is
   a pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)      \
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER      \
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))    \
  : canon_hash (X, M)) & HASH_MASK)

/* Like HASH, but without side-effects.  */
#define SAFE_HASH(X, M) \
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER      \
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))    \
  : safe_hash (X, M)) & HASH_MASK)
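
/* Editorial sketch (not part of the original source): a typical use pairs
   HASH with the lookup routines declared below, e.g.

     unsigned hash = HASH (x, mode);
     struct table_elt *elt = lookup (x, hash, mode);

   SAFE_HASH is used instead when the caller must not have the global
   `do_not_record' and `hash_arg_in_memory' flags disturbed.  */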

/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N)                                                  \
  (REGNO_PTR_FRAME_P(N)                                                 \
   || (HARD_REGISTER_NUM_P (N)                                          \
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET, 1))
#define COST_IN(X, OUTER, OPNO) (REG_P (X) ? 0 : notreg_cost (X, OUTER, OPNO))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)

/* Compare table_elt X and Y and return true iff X is cheaper than Y.  */

#define CHEAPER(X, Y) \
 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
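
/* Editorial note (not part of the original source): CHEAPER is a strict
   "better than" test, so the same-value chains described above can be
   kept in increasing-cost order by inserting each new element just
   before the first existing element it beats:

     if (CHEAPER (new_elt, elt))
       ... link new_elt ahead of elt in the next_same_value chain ...

   Ties on `cost' are broken by `regcost' (see preferable below).  */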

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;

/* Trace a path through the CFG.  */

struct branch_path
{
  /* The basic block for this path entry.  */
  basic_block bb;
};

/* This data describes a block that will be processed by
   cse_extended_basic_block.  */

struct cse_basic_block_data
{
  /* Total number of SETs in block.  */
  int nsets;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current path, indicating which basic_blocks will be processed.  */
  struct branch_path *path;
};


/* Pointers to the live in/live out bitmaps for the boundaries of the
   current EBB.  */
static bitmap cse_ebb_live_in, cse_ebb_live_out;

/* A simple bitmap to track which basic blocks have been visited
   already as part of an already processed extended basic block.  */
static sbitmap cse_visited_basic_blocks;

static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, enum rtx_code, int);
static int approx_reg_cost_1 (rtx *, void *);
static int approx_reg_cost (rtx);
static int preferable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, enum machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static void remove_pseudo_from_table (rtx, unsigned);
static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
                                            enum machine_mode, int, int);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
                                 enum machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, enum machine_mode);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
                                        enum machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);

static inline unsigned canon_hash (rtx, enum machine_mode);
static inline unsigned safe_hash (rtx, enum machine_mode);
static inline unsigned hash_rtx_string (const char *);

static rtx canon_reg (rtx, rtx);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
                                           enum machine_mode *,
                                           enum machine_mode *);
static rtx fold_rtx (rtx, rtx);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx, bool);
static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
                              int);
static void cse_insn (rtx);
static void cse_prescan_path (struct cse_basic_block_data *);
static void invalidate_from_clobbers (rtx);
static rtx cse_process_notes (rtx, rtx, bool *);
static void cse_extended_basic_block (struct cse_basic_block_data *);
static void count_reg_usage (rtx, int *, rtx, int);
static int check_for_label_ref (rtx *, void *);
extern void dump_class (struct table_elt*);
static void get_cse_reg_info_1 (unsigned int regno);
static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
static int check_dependence (rtx *, void *);

static void flush_hash_table (void);
static bool insn_live_p (rtx, int *);
static bool set_live_p (rtx, rtx, int *);
static int cse_change_cc_mode (rtx *, void *);
static void cse_change_cc_mode_insn (rtx, rtx);
static void cse_change_cc_mode_insns (rtx, rtx, rtx);
static enum machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
                                       bool);


#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART           gen_lowpart_if_possible

static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
        return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
        return true;
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case PLUS:
      if (!CONST_INT_P (XEXP (x, 1)))
        return false;
      return fixed_base_plus_p (XEXP (x, 0));

    default:
      return false;
    }
}
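
/* Editorial examples (not part of the original source) of what the
   predicate above accepts: the frame pointer itself and
   (plus (reg fp) (const_int 8)) both yield true, while
   (plus (reg fp) (reg 100)) yields false because the addend is not
   a CONST_INT.  */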

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Subroutine of approx_reg_cost; called through for_each_rtx.  */

static int
approx_reg_cost_1 (rtx *xp, void *data)
{
  rtx x = *xp;
  int *cost_p = (int *) data;

  if (x && REG_P (x))
    {
      unsigned int regno = REGNO (x);

      if (! CHEAP_REGNO (regno))
        {
          if (regno < FIRST_PSEUDO_REGISTER)
            {
              if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
                return 1;
              *cost_p += 2;
            }
          else
            *cost_p += 1;
        }
    }

  return 0;
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of 0.
   Any other hard register reference costs 2, unless the target has
   small register classes for the reference's mode, in which case
   MAX_COST is returned.  */

static int
approx_reg_cost (rtx x)
{
  int cost = 0;

  if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
    return MAX_COST;

  return cost;
}
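
/* Editorial example (not part of the original source): for
   (plus:SI (reg:SI 100) (reg:SI 101)) this returns 2, one for each
   pseudo use, while for (plus:SI (reg:SI 100) (reg/f:SI fp)) it
   returns 1, since the frame pointer satisfies CHEAP_REGNO and so
   contributes nothing.  */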

/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
        return 1;
      if (cost_b == MAX_COST)
        return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
        return 1;
      if (regcost_b == MAX_COST)
        return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
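
/* Editorial examples (not part of the original source):
   preferable (4, 2, 4, 6) is negative, because the rtx costs tie and
   the first rtx has the lower register cost; preferable (MAX_COST, 0,
   5, 9) is positive, because an rtx whose cost is MAX_COST is never
   preferred when the costs differ.  */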

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, enum rtx_code outer, int opno)
{
  return ((GET_CODE (x) == SUBREG
           && REG_P (SUBREG_REG (x))
           && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
           && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
           && (GET_MODE_SIZE (GET_MODE (x))
               < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
           && subreg_lowpart_p (x)
           && TRULY_NOOP_TRUNCATION_MODES_P (GET_MODE (x),
                                             GET_MODE (SUBREG_REG (x))))
          ? 0
          : rtx_cost (x, outer, opno, optimize_this_for_speed_p) * 2);
}
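
/* Editorial note (not part of the original source): the effect is that,
   on a target where truncating DImode to SImode is a no-op, a lowpart
   SImode SUBREG of a DImode register gets a cost of 0 from COST, i.e. it
   is treated as cheaply as a plain register; every other non-register
   rtx costs twice its rtx_cost.  */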
 | 
      
         | 768 |  |  |  
 | 
      
         | 769 |  |  |  
/* Initialize CSE_REG_INFO_TABLE.  */

static void
init_cse_reg_info (unsigned int nregs)
{
  /* Do we need to grow the table?  */
  if (nregs > cse_reg_info_table_size)
    {
      unsigned int new_size;

      if (cse_reg_info_table_size < 2048)
        {
          /* Compute a new size that is a power of 2 and no smaller
             than the larger of NREGS and 64.  */
          new_size = (cse_reg_info_table_size
                      ? cse_reg_info_table_size : 64);

          while (new_size < nregs)
            new_size *= 2;
        }
      else
        {
          /* If we need a big table, allocate just enough to hold
             NREGS registers.  */
          new_size = nregs;
        }

      /* Reallocate the table with NEW_SIZE entries.  */
      free (cse_reg_info_table);
      cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
      cse_reg_info_table_size = new_size;
      cse_reg_info_table_first_uninitialized = 0;
    }

  /* Do we have all of the first NREGS entries initialized?  */
  if (cse_reg_info_table_first_uninitialized < nregs)
    {
      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
      unsigned int i;

      /* Put the old timestamp on newly allocated entries so that they
         will all be considered out of date.  We do not touch those
         entries beyond the first NREGS entries to be nice to the
         virtual memory.  */
      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
        cse_reg_info_table[i].timestamp = old_timestamp;

      cse_reg_info_table_first_uninitialized = nregs;
    }
}
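
/* For example, with an empty table and NREGS == 100 the loop above
   grows NEW_SIZE from 64 to 128; once the table has reached 2048
   entries, a request for, say, 5000 registers reallocates it to exactly
   5000.  Entries beyond the previously initialized region get the stale
   timestamp CSE_REG_INFO_TIMESTAMP - 1, so get_cse_reg_info will lazily
   (re)initialize them on first use.  */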

/* Given REGNO, initialize the cse_reg_info entry for REGNO.  */

static void
get_cse_reg_info_1 (unsigned int regno)
{
  /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
     entry will be considered to have been initialized.  */
  cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;

  /* Initialize the rest of the entry.  */
  cse_reg_info_table[regno].reg_tick = 1;
  cse_reg_info_table[regno].reg_in_table = -1;
  cse_reg_info_table[regno].subreg_ticked = -1;
  cse_reg_info_table[regno].reg_qty = -regno - 1;
}

/* Find a cse_reg_info entry for REGNO.  */

static inline struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info *p = &cse_reg_info_table[regno];

  /* If this entry has not been initialized, go ahead and initialize
     it.  */
  if (p->timestamp != cse_reg_info_timestamp)
    get_cse_reg_info_1 (regno);

  return p;
}
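
/* The timestamp check above is a lazy-invalidation idiom: bumping
   CSE_REG_INFO_TIMESTAMP (as new_basic_block does below) marks every
   entry stale at once, and an entry is reinitialized only when it is
   next looked up, so invalidating the whole table costs O(1) rather
   than a sweep over every register.  */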

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = 0;

  /* Invalidate cse_reg_info_table.  */
  cse_reg_info_timestamp++;

  /* Clear out hash table state for this pass.  */
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
        {
          struct table_elt *last = first;

          table[i] = NULL;

          while (last->next_same_hash != NULL)
            last = last->next_same_hash;

          /* Now relink this entire hash chain into
             the free element list.  */

          last->next_same_hash = free_element_chain;
          free_element_chain = first;
        }
    }

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}
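
/* Note that the hash chains are not freed here; each nonempty bucket is
   spliced whole onto FREE_ELEMENT_CHAIN through its next_same_hash
   links, so the table_elt structures are recycled by later calls to
   insert_with_costs.  */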

/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, enum machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  gcc_assert (next_qty < max_qty);

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}
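
/* For example, the first call in a block for (reg:SI 100) hands out
   quantity number 0: qty_table[0].first_reg and .last_reg are both 100,
   no constant equivalence is recorded yet, and reg_eqv_table[100]
   becomes a one-element doubly linked chain (next == prev == -1).  */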

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old_reg);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old_reg));

  REG_QTY (new_reg) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
         that not only can they not be allocated by the compiler, but
         they cannot be used in substitutions or canonicalizations
         either.  */
      && (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS)
      && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
          || (new_reg >= FIRST_PSEUDO_REGISTER
              && (firstr < FIRST_PSEUDO_REGISTER
                  || (bitmap_bit_p (cse_ebb_live_out, new_reg)
                      && !bitmap_bit_p (cse_ebb_live_out, firstr))
                  || (bitmap_bit_p (cse_ebb_live_in, new_reg)
                      && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
    {
      reg_eqv_table[firstr].prev = new_reg;
      reg_eqv_table[new_reg].next = firstr;
      reg_eqv_table[new_reg].prev = -1;
      ent->first_reg = new_reg;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
         Otherwise, insert before any non-fixed hard regs that are at the
         end.  Registers of class NO_REGS cannot be used as an
         equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
             && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
             && new_reg >= FIRST_PSEUDO_REGISTER)
        lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
        reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
      else
        qty_table[q].last_reg = new_reg;
      reg_eqv_table[lastr].next = new_reg;
      reg_eqv_table[new_reg].prev = lastr;
    }
}
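
/* Continuing the example above: if (set (reg:SI 101) (reg:SI 100)) puts
   pseudo 101 into quantity 0 as well, then, assuming neither liveness
   bitmap favors 101 over 100, the else branch appends 101 after 100, so
   100 stays first_reg and remains the canonical replacement for this
   quantity.  */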

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = -reg - 1;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int i;

      for (i = regno; i < endregno; i++)
        {
          if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
            remove_invalid_refs (i);

          REG_IN_TABLE (i) = REG_TICK (i);
          SUBREG_TICKED (i) = -1;
        }

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
        {
          /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
             the last store to this register really stored into this
             subreg, then remove the memory of this subreg.
             Otherwise, remove any memory of the entire register and
             all its subregs from the table.  */
          if (REG_TICK (i) - REG_IN_TABLE (i) > 1
              || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
            remove_invalid_refs (i);
          else
            remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
        }

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
          && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
        if (insert_regs (XEXP (x, 0), NULL, 0))
          {
            rehash_using_reg (XEXP (x, 0));
            changed = 1;
          }

      if (REG_P (XEXP (x, 1))
          && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
        if (insert_regs (XEXP (x, 1), NULL, 0))
          {
            rehash_using_reg (XEXP (x, 1));
            changed = 1;
          }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
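
/* For instance, when X is (eq:SI (reg:SI 70) (const_int 0)) and pseudo
   70 has no quantity yet, the COMPARISON_P case above gives it one
   right away, so a later record_jump_equiv computes the same hash code
   for X and finds it in the table without having to rehash.  */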

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
         wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
        {
          struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

          if (ent->mode != GET_MODE (x))
            return 0;
        }

      if (modified || ! qty_valid)
        {
          if (classp)
            for (classp = classp->first_same_value;
                 classp != 0;
                 classp = classp->next_same_value)
              if (REG_P (classp->exp)
                  && GET_MODE (classp->exp) == GET_MODE (x))
                {
                  unsigned c_regno = REGNO (classp->exp);

                  gcc_assert (REGNO_QTY_VALID_P (c_regno));

                  /* Suppose that 5 is hard reg and 100 and 101 are
                     pseudos.  Consider

                     (set (reg:si 100) (reg:si 5))
                     (set (reg:si 5) (reg:si 100))
                     (set (reg:di 101) (reg:di 5))

                     We would now set REG_QTY (101) = REG_QTY (5), but the
                     entry for 5 is in SImode.  When we use this later in
                     copy propagation, we get the register in the wrong
                     mode.  */
                  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
                    continue;

                  make_regs_eqv (regno, c_regno);
                  return 1;
                }

          /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
             than REG_IN_TABLE to find out if there was only a single preceding
             invalidation - for the SUBREG - or another one, which would be
             for the full register.  However, if we find here that REG_TICK
             indicates that the register is invalid, it means that it has
             been invalidated in a separate operation.  The SUBREG might be used
             now (then this is a recursive call), or we might use the full REG
             now and a SUBREG of it later.  So bump up REG_TICK so that
             mention_regs will do the right thing.  */
          if (! modified
              && REG_IN_TABLE (regno) >= 0
              && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
            REG_TICK (regno)++;
          make_new_qty (regno, GET_MODE (x));
          return 1;
        }

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
           && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}


/* Compute upper and lower anchors for CST.  Also compute the offset of CST
   from these anchors/bases such that *_BASE + *_OFFS = CST.  Return false iff
   CST is equal to an anchor.  */

static bool
compute_const_anchors (rtx cst,
                       HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs,
                       HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs)
{
  HOST_WIDE_INT n = INTVAL (cst);

  *lower_base = n & ~(targetm.const_anchor - 1);
  if (*lower_base == n)
    return false;

  *upper_base =
    (n + (targetm.const_anchor - 1)) & ~(targetm.const_anchor - 1);
  *upper_offs = n - *upper_base;
  *lower_offs = n - *lower_base;
  return true;
}
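
/* Worked example, assuming targetm.const_anchor is 0x8000: for
   CST == 0x12345 the lower anchor is 0x10000 with offset 0x2345 and the
   upper anchor is 0x18000 with offset -0x5cbb, so that in both cases
   base + offset == 0x12345.  A CST that is already a multiple of the
   anchor, such as 0x10000, makes the function return false.  */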

/* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE.  */

static void
insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs,
                     enum machine_mode mode)
{
  struct table_elt *elt;
  unsigned hash;
  rtx anchor_exp;
  rtx exp;

  anchor_exp = GEN_INT (anchor);
  hash = HASH (anchor_exp, mode);
  elt = lookup (anchor_exp, hash, mode);
  if (!elt)
    elt = insert (anchor_exp, NULL, hash, mode);

  exp = plus_constant (reg, offs);
  /* REG has just been inserted and the hash codes recomputed.  */
  mention_regs (exp);
  hash = HASH (exp, mode);

  /* Use the cost of the register rather than the whole expression.  When
     looking up constant anchors we will further offset the corresponding
     expression; therefore it does not make sense to prefer REGs over
     reg-immediate additions.  Prefer instead the oldest expression.  Also
     don't prefer pseudos over hard regs so that we derive constants in
     argument registers from other argument registers rather than from the
     original pseudo that was used to synthesize the constant.  */
  insert_with_costs (exp, elt, hash, mode, COST (reg), 1);
}

/* The constant CST is equivalent to the register REG.  Create
   equivalences between the two anchors of CST and the corresponding
   register-offset expressions using REG.  */

static void
insert_const_anchors (rtx reg, rtx cst, enum machine_mode mode)
{
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;

  if (!compute_const_anchors (cst, &lower_base, &lower_offs,
                              &upper_base, &upper_offs))
      return;

  /* Ignore anchors of value 0.  Constants accessible from zero are
     simple.  */
  if (lower_base != 0)
    insert_const_anchor (lower_base, reg, -lower_offs, mode);

  if (upper_base != 0)
    insert_const_anchor (upper_base, reg, -upper_offs, mode);
}
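
/* Continuing the 0x8000-anchor example: if a pseudo R holds 0x12345,
   this records (const_int 0x10000) as equivalent to
   (plus R (const_int -0x2345)) and (const_int 0x18000) as equivalent to
   (plus R (const_int 0x5cbb)).  Constants near either anchor can then
   be synthesized from R with a single addition; see try_const_anchors
   below.  */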

/* We need to express ANCHOR_ELT->exp + OFFS.  Walk the equivalence list of
   ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
   valid expression.  Return the cheapest and oldest of such expressions.  In
   *OLD, return how old the resulting expression is compared to the other
   equivalent expressions.  */

static rtx
find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs,
                           unsigned *old)
{
  struct table_elt *elt;
  unsigned idx;
  struct table_elt *match_elt;
  rtx match;

  /* Find the cheapest and *oldest* expression to maximize the chance of
     reusing the same pseudo.  */

  match_elt = NULL;
  match = NULL_RTX;
  for (elt = anchor_elt->first_same_value, idx = 0;
       elt;
       elt = elt->next_same_value, idx++)
    {
      if (match_elt && CHEAPER (match_elt, elt))
        return match;

      if (REG_P (elt->exp)
          || (GET_CODE (elt->exp) == PLUS
              && REG_P (XEXP (elt->exp, 0))
              && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT))
        {
          rtx x;

          /* Ignore expressions that are no longer valid.  */
          if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false))
            continue;

          x = plus_constant (elt->exp, offs);
          if (REG_P (x)
              || (GET_CODE (x) == PLUS
                  && IN_RANGE (INTVAL (XEXP (x, 1)),
                               -targetm.const_anchor,
                               targetm.const_anchor - 1)))
            {
              match = x;
              match_elt = elt;
              *old = idx;
            }
        }
    }

  return match;
}

/* Try to express the constant SRC_CONST using a register+offset expression
   derived from a constant anchor.  Return it if successful, or NULL_RTX
   otherwise.  */

static rtx
try_const_anchors (rtx src_const, enum machine_mode mode)
{
  struct table_elt *lower_elt, *upper_elt;
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
  rtx lower_anchor_rtx, upper_anchor_rtx;
  rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX;
  unsigned lower_old, upper_old;

  if (!compute_const_anchors (src_const, &lower_base, &lower_offs,
                              &upper_base, &upper_offs))
    return NULL_RTX;

  lower_anchor_rtx = GEN_INT (lower_base);
  upper_anchor_rtx = GEN_INT (upper_base);
  lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode);
  upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode);

  if (lower_elt)
    lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old);
  if (upper_elt)
    upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old);

  if (!lower_exp)
    return upper_exp;
  if (!upper_exp)
    return lower_exp;

  /* Return the older expression.  */
  return (upper_old > lower_old ? upper_exp : lower_exp);
}
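
/* Putting the anchor machinery together: with the equivalences above in
   the table because a pseudo R is known to hold 0x12345, a request for
   (const_int 0x12344) finds (plus R (const_int -0x2345)) in the class
   of anchor 0x10000, and offsetting it by lower_offs == 0x2344 yields
   (plus R (const_int -1)), which the caller can use instead of
   synthesizing the constant from scratch.  */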

/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
        struct table_elt *newfirst = next;
        while (next)
          {
            next->first_same_value = newfirst;
            next = next->next_same_value;
          }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
        /* This entry is not in the proper hash bucket.  This can happen
           when two classes were merged by `merge_equiv_classes'.  Search
           for the hash bucket that it heads.  This happens only very
           rarely, so the cost is acceptable.  */
        for (hash = 0; hash < HASH_SIZE; hash++)
          if (table[hash] == elt)
            table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
        p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
        p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Same as above, but X is a pseudo-register.  */

static void
remove_pseudo_from_table (rtx x, unsigned int hash)
{
  struct table_elt *elt;

  /* Because a pseudo-register can be referenced in more than one
     mode, we might have to remove more than one table entry.  */
  while ((elt = lookup_for_remove (x, hash, VOIDmode)))
    remove_from_table (elt, hash);
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && REG_P (x))
                            || exp_equiv_p (x, p->exp, !REG_P (x), false)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
         invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
        if (REG_P (p->exp)
            && REGNO (p->exp) == regno)
          return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
        if (mode == p->mode
            && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
          return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (rtx x, enum rtx_code code)
{
  struct table_elt *p
    = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
        /* Make sure this is a valid entry in the table.  */
        && exp_equiv_p (p->exp, p->exp, 1, false))
      return p->exp;

  return 0;
}
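
/* For example, lookup_as_function (x, CONST_INT) answers "what constant,
   if any, is X currently known to equal?" by scanning X's equivalence
   class for a CONST_INT entry that is still valid.  */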
 | 
      
         | 1554 |  |  |  
 | 
      
         | 1555 |  |  | /* Insert X in the hash table, assuming HASH is its hash code and
 | 
      
         | 1556 |  |  |    CLASSP is an element of the class it should go in (or 0 if a new
 | 
      
         | 1557 |  |  |    class should be made).  COST is the code of X and reg_cost is the
 | 
      
         | 1558 |  |  |    cost of registers in X.  It is inserted at the proper position to
 | 
      
         | 1559 |  |  |    keep the class in the order cheapest first.
 | 
      
         | 1560 |  |  |  
 | 
      
         | 1561 |  |  |    MODE is the machine-mode of X, or if X is an integer constant
 | 
      
         | 1562 |  |  |    with VOIDmode then MODE is the mode with which X will be used.
 | 
      
         | 1563 |  |  |  
 | 
      
         | 1564 |  |  |    For elements of equal cheapness, the most recent one
 | 
      
         | 1565 |  |  |    goes in front, except that the first element in the list
 | 
      
         | 1566 |  |  |    remains first unless a cheaper element is added.  The order of
 | 
      
         | 1567 |  |  |    pseudo-registers does not matter, as canon_reg will be called to
 | 
      
         | 1568 |  |  |    find the cheapest when a register is retrieved from the table.
 | 
      
         | 1569 |  |  |  
 | 
      
         | 1570 |  |  |    The in_memory field in the hash table element is set to 0.
 | 
      
         | 1571 |  |  |    The caller must set it nonzero if appropriate.
 | 
      
         | 1572 |  |  |  
 | 
      
         | 1573 |  |  |    You should call insert_regs (X, CLASSP, MODIFY) before calling here,
 | 
      
         | 1574 |  |  |    and if insert_regs returns a nonzero value
 | 
      
         | 1575 |  |  |    you must then recompute its hash code before calling here.
 | 
      
         | 1576 |  |  |  
 | 
      
         | 1577 |  |  |    If necessary, update table showing constant values of quantities.  */
 | 
      
         | 1578 |  |  |  
 | 
      
         | 1579 |  |  | static struct table_elt *
 | 
      
         | 1580 |  |  | insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
 | 
      
         | 1581 |  |  |                    enum machine_mode mode, int cost, int reg_cost)
 | 
      
         | 1582 |  |  | {
 | 
      
         | 1583 |  |  |   struct table_elt *elt;
 | 
      
         | 1584 |  |  |  
 | 
      
         | 1585 |  |  |   /* If X is a register and we haven't made a quantity for it,
 | 
      
         | 1586 |  |  |      something is wrong.  */
 | 
      
         | 1587 |  |  |   gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
 | 
      
         | 1588 |  |  |  
 | 
      
         | 1589 |  |  |   /* If X is a hard register, show it is being put in the table.  */
 | 
      
         | 1590 |  |  |   if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
 | 
      
         | 1591 |  |  |     add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));
 | 
      
         | 1592 |  |  |  
 | 
      
         | 1593 |  |  |   /* Put an element for X into the right hash bucket.  */
 | 
      
         | 1594 |  |  |  
 | 
      
         | 1595 |  |  |   elt = free_element_chain;
 | 
      
         | 1596 |  |  |   if (elt)
 | 
      
         | 1597 |  |  |     free_element_chain = elt->next_same_hash;
 | 
      
         | 1598 |  |  |   else
 | 
      
         | 1599 |  |  |     elt = XNEW (struct table_elt);
 | 
      
         | 1600 |  |  |  
 | 
      
         | 1601 |  |  |   elt->exp = x;
 | 
      
         | 1602 |  |  |   elt->canon_exp = NULL_RTX;
 | 
      
         | 1603 |  |  |   elt->cost = cost;
 | 
      
         | 1604 |  |  |   elt->regcost = reg_cost;
 | 
      
         | 1605 |  |  |   elt->next_same_value = 0;
 | 
      
         | 1606 |  |  |   elt->prev_same_value = 0;
 | 
      
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
        /* Insert at the head of the class.  */
        {
          struct table_elt *p;
          elt->next_same_value = classp;
          classp->prev_same_value = elt;
          elt->first_same_value = elt;

          for (p = classp; p; p = p->next_same_value)
            p->first_same_value = elt;
        }
      else
        {
          /* Insert not at head of the class.  */
          /* Put it after the last element cheaper than X.  */
          struct table_elt *p, *next;

          for (p = classp;
               (next = p->next_same_value) && CHEAPER (next, elt);
               p = next)
            ;

          /* Put it after P and before NEXT.  */
          elt->next_same_value = next;
          if (next)
            next->prev_same_value = elt;

          elt->prev_same_value = p;
          p->next_same_value = elt;
          elt->first_same_value = classp;
        }
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */
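  /* As an illustration (register and value are arbitrary): after
     (set (reg:SI 100) (const_int 7)), the constant (const_int 7) is
     inserted with the class of (reg:SI 100) as CLASSP; the first branch
     below then records (const_int 7) as the known constant value of
     reg 100's quantity, so the rest of the pass can treat reg 100 as
     having that constant value.  */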

  if (elt->is_const && classp && REG_P (classp->exp)
      && !REG_P (x))
    {
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (REG_P (x)
           && classp
           && ! qty_table[REG_QTY (REGNO (x))].const_rtx
           && ! elt->is_const)
    {
      struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
        {
          if (p->is_const && !REG_P (p->exp))
            {
              int x_q = REG_QTY (REGNO (x));
              struct qty_table_elem *x_ent = &qty_table[x_q];

              x_ent->const_rtx
                = gen_lowpart (GET_MODE (x), p->exp);
              x_ent->const_insn = this_insn;
              break;
            }
        }
    }

  else if (REG_P (x)
           && qty_table[REG_QTY (REGNO (x))].const_rtx
           && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
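  /* For instance (the symbol name is arbitrary), inserting
     (const (plus (symbol_ref "base") (const_int 8))) looks up the
     integer-free term (symbol_ref "base"); every constant of the form
     "base" plus some offset that gets inserted is threaded onto one
     circular related_value chain rooted at that entry, which
     use_related_value walks later.  */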
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
        {
          /* Get the integer-free subexpression in the hash table.  */
          subhash = SAFE_HASH (subexp, mode);
          subelt = lookup (subexp, subhash, mode);
          if (subelt == 0)
            subelt = insert (subexp, NULL, subhash, mode);
          /* Initialize SUBELT's circular chain if it has none.  */
          if (subelt->related_value == 0)
            subelt->related_value = subelt;
          /* Find the element in the circular chain that precedes SUBELT.  */
          subelt_prev = subelt;
          while (subelt_prev->related_value != subelt)
            subelt_prev = subelt_prev->related_value;
          /* Put new ELT into SUBELT's circular chain just before SUBELT.
             This way the element that follows SUBELT is the oldest one.  */
          elt->related_value = subelt_prev->related_value;
          subelt_prev->related_value = elt;
        }
    }

  return elt;
}

/* Wrap insert_with_costs by passing the default costs.  */

static struct table_elt *
insert (rtx x, struct table_elt *classp, unsigned int hash,
        enum machine_mode mode)
{
  return
    insert_with_costs (x, classp, hash, mode, COST (x), approx_reg_cost (x));
}


/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */
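/* A typical trigger (details depend on the callers): a conditional jump
   that is only taken when two expressions are equal lets the classes of
   those two expressions be merged on the path where the equality is known
   to hold.  */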

static void
merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
{
  struct table_elt *elt, *next, *new_elt;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned int hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
         Don't do this for invalid entries as we cannot find their
         hash code (it also isn't necessary).  */
      if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
        {
          bool need_rehash = false;

          hash_arg_in_memory = 0;
          hash = HASH (exp, mode);

          if (REG_P (exp))
            {
              need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
              delete_reg_equiv (REGNO (exp));
            }

          if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
            remove_pseudo_from_table (exp, hash);
          else
            remove_from_table (elt, hash);

          if (insert_regs (exp, class1, 0) || need_rehash)
            {
              rehash_using_reg (exp);
              hash = HASH (exp, mode);
            }
          new_elt = insert (exp, class1, hash, mode);
          new_elt->in_memory = hash_arg_in_memory;
        }
    }
}

/* Flush the entire hash table.  */

static void
flush_hash_table (void)
{
  int i;
  struct table_elt *p;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = table[i])
      {
        /* Note that invalidate can remove elements
           after P in the current hash chain.  */
        if (REG_P (p->exp))
          invalidate (p->exp, VOIDmode);
        else
          remove_from_table (p, i);
      }
}

/* Function called for each rtx to check whether a true dependence exists.  */
struct check_dependence_data
{
  enum machine_mode mode;
  rtx exp;
  rtx addr;
};

static int
check_dependence (rtx *x, void *data)
{
  struct check_dependence_data *d = (struct check_dependence_data *) data;
  if (*x && MEM_P (*x))
    return canon_true_dependence (d->exp, d->mode, d->addr, *x, NULL_RTX);
  else
    return 0;
}
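/* check_dependence is invoked through for_each_rtx from the MEM case of
   invalidate below, once for each subexpression of an entry's canonicalized
   form; any MEM found there is tested for a true dependence against the
   store being processed.  */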

/* Remove from the hash table, or mark as invalid, all expressions whose
   values could be altered by storing in X.  X is a register, a subreg, or
   a memory reference with nonvarying address (because, when a memory
   reference with a varying address is stored in, all memory references are
   removed by invalidate_memory so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be
   invalidated instead of just the amount indicated by the mode of X.  This
   is only used for bitfield stores into memory.

   A nonvarying address may be just a register or just a symbol reference,
   or it may be either of those plus a numeric offset.  */
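/* For example (the exact RTL is illustrative), a store such as
   (set (mem:SI (plus (reg/f:SI 54) (const_int -4))) (reg:SI 100))
   reaches this function with the destination MEM as X; cached expressions
   whose memory may overlap that slot are dropped, while unrelated entries
   survive.  A bitfield store supplies a wider FULL_MODE so that the whole
   affected storage unit is invalidated, as described above.  */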

static void
invalidate (rtx x, enum machine_mode full_mode)
{
  int i;
  struct table_elt *p;
  rtx addr;

  switch (GET_CODE (x))
    {
    case REG:
      {
        /* If X is a register, dependencies on its contents are recorded
           through the qty number mechanism.  Just change the qty number of
           the register, mark it as invalid for expressions that refer to it,
           and remove it itself.  */
        unsigned int regno = REGNO (x);
        unsigned int hash = HASH (x, GET_MODE (x));

        /* Remove REGNO from any quantity list it might be on and indicate
           that its value might have changed.  If it is a pseudo, remove its
           entry from the hash table.

           For a hard register, we do the first two actions above for any
           additional hard registers corresponding to X.  Then, if any of these
           registers are in the table, we must remove any REG entries that
           overlap these registers.  */
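        /* For example (target-dependent), a multi-word hard register such
           as a DImode value living in two word-sized registers makes
           END_HARD_REGNO return one past the last register covered; the
           loop below then bumps REG_TICK and deletes the quantity of each
           additional register, so stale equivalences involving any part
           of X are dropped.  */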

        delete_reg_equiv (regno);
        REG_TICK (regno)++;
        SUBREG_TICKED (regno) = -1;

        if (regno >= FIRST_PSEUDO_REGISTER)
          remove_pseudo_from_table (x, hash);
        else
          {
            HOST_WIDE_INT in_table
              = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
            unsigned int endregno = END_HARD_REGNO (x);
            unsigned int tregno, tendregno, rn;
            struct table_elt *p, *next;

            CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

            for (rn = regno + 1; rn < endregno; rn++)
              {
                in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
                CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
                delete_reg_equiv (rn);
                REG_TICK (rn)++;
                SUBREG_TICKED (rn) = -1;
              }

            if (in_table)
              for (hash = 0; hash < HASH_SIZE; hash++)
                for (p = table[hash]; p; p = next)
                  {
                    next = p->next_same_hash;

                    if (!REG_P (p->exp)
                        || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
                      continue;

                    tregno = REGNO (p->exp);
                    tendregno = END_HARD_REGNO (p->exp);
                    if (tendregno > regno && tregno < endregno)
                      remove_from_table (p, hash);
                  }
          }
      }
      return;

    case SUBREG:
      invalidate (SUBREG_REG (x), VOIDmode);
      return;

    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
        invalidate (XVECEXP (x, 0, i), VOIDmode);
      return;

    case EXPR_LIST:
      /* This is part of a disjoint return value; extract the location in
         question ignoring the offset.  */
      invalidate (XEXP (x, 0), VOIDmode);
      return;

    case MEM:
      addr = canon_rtx (get_addr (XEXP (x, 0)));
      /* Calculate the canonical version of X here so that
         true_dependence doesn't generate new RTL for X on each call.  */
      x = canon_rtx (x);

      /* Remove all hash table elements that refer to overlapping pieces of
         memory.  */
      if (full_mode == VOIDmode)
        full_mode = GET_MODE (x);

      for (i = 0; i < HASH_SIZE; i++)
        {
          struct table_elt *next;

          for (p = table[i]; p; p = next)
            {
              next = p->next_same_hash;
              if (p->in_memory)
                {
                  struct check_dependence_data d;

                  /* Just canonicalize the expression once;
                     otherwise each time we call invalidate
                     true_dependence will canonicalize the
                     expression again.  */
                  if (!p->canon_exp)
                    p->canon_exp = canon_rtx (p->exp);
                  d.exp = x;
                  d.addr = addr;
                  d.mode = full_mode;
                  if (for_each_rtx (&p->canon_exp, check_dependence, &d))
                    remove_from_table (p, i);
                }
            }
        }
      return;

    default:
      gcc_unreachable ();
    }
}

/* Remove all expressions that refer to register REGNO,
   since they are already invalid, and we are about to
   mark that register valid again and don't want the old
   expressions to reappear as valid.  */

static void
remove_invalid_refs (unsigned int regno)
{
  unsigned int i;
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (!REG_P (p->exp)
            && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
          remove_from_table (p, i);
      }
}

/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
   and mode MODE.  */
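/* Note on the removal test below: an entry that mentions REGNO survives
   only if it is a SUBREG of REGNO whose bytes
   [SUBREG_BYTE, SUBREG_BYTE + GET_MODE_SIZE - 1] lie entirely outside
   [OFFSET, OFFSET + GET_MODE_SIZE (MODE) - 1]; every other non-REG
   expression referring to REGNO is removed.  */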
static void
remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
                            enum machine_mode mode)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        rtx exp = p->exp;
        next = p->next_same_hash;

        if (!REG_P (exp)
            && (GET_CODE (exp) != SUBREG
                || !REG_P (SUBREG_REG (exp))
                || REGNO (SUBREG_REG (exp)) != regno
                || (((SUBREG_BYTE (exp)
                      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
                    && SUBREG_BYTE (exp) <= end))
            && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
          remove_from_table (p, i);
      }
}

/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */
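/* A register's hash value incorporates its quantity number (see the REG
   case of hash_rtx_cb), so once a jump equivalence gives a register a new
   quantity, an entry mentioning that register may now belong in a
   different chain; the scan below relinks such entries in place rather
   than discarding them.  */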

static void
rehash_using_reg (rtx x)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (!REG_P (x)
      || REG_IN_TABLE (REGNO (x)) < 0
      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  */

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (reg_mentioned_p (x, p->exp)
            && exp_equiv_p (p->exp, p->exp, 1, false)
            && i != (hash = SAFE_HASH (p->exp, p->mode)))
          {
            if (p->next_same_hash)
              p->next_same_hash->prev_same_hash = p->prev_same_hash;

            if (p->prev_same_hash)
              p->prev_same_hash->next_same_hash = p->next_same_hash;
            else
              table[i] = p->next_same_hash;

            p->next_same_hash = table[hash];
            p->prev_same_hash = 0;
            if (table[hash])
              table[hash]->prev_same_hash = p;
            table[hash] = p;
          }
      }
}

/* Remove from the hash table any expression that is a hard register
   overlapping a call-clobbered register.  Also update the TICK values of
   the clobbered registers.  */

static void
invalidate_for_call (void)
{
  unsigned int regno, endregno;
  unsigned int i;
  unsigned hash;
  struct table_elt *p, *next;
  int in_table = 0;

  /* Go through all the hard registers.  For each that is clobbered in
     a CALL_INSN, remove the register from quantity chains and update
     reg_tick if defined.  Also see if any of these registers is currently
     in the table.  */

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
      {
        delete_reg_equiv (regno);
        if (REG_TICK (regno) >= 0)
          {
            REG_TICK (regno)++;
            SUBREG_TICKED (regno) = -1;
          }

        in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
      }

  /* In the case where we have no call-clobbered hard registers in the
     table, we are done.  Otherwise, scan the table and remove any
     entry that overlaps a call-clobbered register.  */

  if (in_table)
    for (hash = 0; hash < HASH_SIZE; hash++)
      for (p = table[hash]; p; p = next)
        {
          next = p->next_same_hash;

          if (!REG_P (p->exp)
              || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
            continue;

          regno = REGNO (p->exp);
          endregno = END_HARD_REGNO (p->exp);

          for (i = regno; i < endregno; i++)
            if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
              {
                remove_from_table (p, hash);
                break;
              }
        }
}

/* Given an expression X of type CONST,
   and ELT which is its table entry (or 0 if it
   is not in the hash table),
   return an alternate expression for X as a register plus integer.
   If none can be found, return 0.  */
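/* Illustrative example (names and numbers are hypothetical): if X is
   (const (plus (symbol_ref "tab") (const_int 8))) and the table already
   knows that (reg 7) holds (const (plus (symbol_ref "tab") (const_int 4))),
   the related-value chain rooted at the entry for (symbol_ref "tab") leads
   to that class, OFFSET works out to 8 - 4 = 4, and the result is
   (plus (reg 7) (const_int 4)).  */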

static rtx
use_related_value (rtx x, struct table_elt *elt)
{
  struct table_elt *relt = 0;
  struct table_elt *p, *q;
  HOST_WIDE_INT offset;

  /* First, is there anything related known?
     If we have a table element, we can tell from that.
     Otherwise, must look it up.  */

  if (elt != 0 && elt->related_value != 0)
    relt = elt;
  else if (elt == 0 && GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      if (subexp != 0)
        relt = lookup (subexp,
                       SAFE_HASH (subexp, GET_MODE (subexp)),
                       GET_MODE (subexp));
    }

  if (relt == 0)
    return 0;

  /* Search all related table entries for one that has an
     equivalent register.  */

  p = relt;
  while (1)
    {
      /* This loop is strange in that it is executed in two different cases.
         The first is when X is already in the table.  Then it is searching
         the RELATED_VALUE list of X's class (RELT).  The second case is when
         X is not in the table.  Then RELT points to a class for the related
         value.

         Ensure that, whatever case we are in, we ignore classes that have
         the same value as X.  */

      if (rtx_equal_p (x, p->exp))
        q = 0;
      else
        for (q = p->first_same_value; q; q = q->next_same_value)
          if (REG_P (q->exp))
            break;

      if (q)
        break;

      p = p->related_value;

      /* We went all the way around, so there is nothing to be found.
         Alternatively, perhaps RELT was in the table for some other reason
         and it has no related values recorded.  */
      if (p == relt || p == 0)
        break;
    }

  if (q == 0)
    return 0;

  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
  return plus_constant (q->exp, offset);
}


/* Hash a string.  Just add its bytes up.  */
static inline unsigned
hash_rtx_string (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}

/* Same as hash_rtx, but call CB on each rtx if it is not NULL.
   When the callback returns true, we continue with the new rtx.  */

unsigned
hash_rtx_cb (const_rtx x, enum machine_mode mode,
             int *do_not_record_p, int *hash_arg_in_memory_p,
             bool have_reg_qty, hash_rtx_callback_function cb)
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;
  enum machine_mode newmode;
  rtx newx;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */
 repeat:
  if (x == 0)
    return hash;

  /* Invoke the callback first.  */
  if (cb != NULL
      && ((*cb) (x, mode, &newx, &newmode)))
    {
      hash += hash_rtx_cb (newx, newmode, do_not_record_p,
                           hash_arg_in_memory_p, have_reg_qty, cb);
      return hash;
    }

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        unsigned int regno = REGNO (x);

        if (do_not_record_p && !reload_completed)
          {
            /* On some machines, we can't record any non-fixed hard register,
               because extending its life will cause reload problems.  We
               consider ap, fp, sp, gp to be fixed for this purpose.

               We also consider CCmode registers to be fixed for this purpose;
               failure to do so leads to failure to simplify 0<100 type of
               conditionals.

               On all machines, we can't record any global registers.
               Nor should we record any register that is in a small
               class, as defined by TARGET_CLASS_LIKELY_SPILLED_P.  */
            bool record;

            if (regno >= FIRST_PSEUDO_REGISTER)
              record = true;
            else if (x == frame_pointer_rtx
                     || x == hard_frame_pointer_rtx
                     || x == arg_pointer_rtx
                     || x == stack_pointer_rtx
                     || x == pic_offset_table_rtx)
              record = true;
            else if (global_regs[regno])
              record = false;
            else if (fixed_regs[regno])
              record = true;
            else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
              record = true;
            else if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
              record = false;
            else if (targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno)))
              record = false;
            else
              record = true;

            if (!record)
              {
                *do_not_record_p = 1;
                return 0;
              }
          }

        hash += ((unsigned int) REG << 7);
        hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
        return hash;
      }

    /* We handle SUBREG of a REG specially because the underlying
       reg changes its hash value with every value change; we don't
       want to have to forget unrelated subregs when one subreg changes.  */
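    /* For instance (the rtx is illustrative), (subreg:SI (reg:DI 100) 4)
       is hashed from the SUBREG code, the register number 100 and the
       subreg byte, not from reg 100's current quantity, so giving reg 100
       a new value does not move this entry to a different hash chain.  */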
 | 
      
         | 2327 |  |  |     case SUBREG:
 | 
      
         | 2328 |  |  |       {
 | 
      
         | 2329 |  |  |         if (REG_P (SUBREG_REG (x)))
 | 
      
         | 2330 |  |  |           {
 | 
      
         | 2331 |  |  |             hash += (((unsigned int) SUBREG << 7)
 | 
      
         | 2332 |  |  |                      + REGNO (SUBREG_REG (x))
 | 
      
         | 2333 |  |  |                      + (SUBREG_BYTE (x) / UNITS_PER_WORD));
 | 
      
         | 2334 |  |  |             return hash;
 | 
      
         | 2335 |  |  |           }
 | 
      
         | 2336 |  |  |         break;
 | 
      
         | 2337 |  |  |       }
 | 
      
         | 2338 |  |  |  
 | 
      
         | 2339 |  |  |     case CONST_INT:
 | 
      
         | 2340 |  |  |       hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
 | 
      
         | 2341 |  |  |                + (unsigned int) INTVAL (x));
 | 
      
         | 2342 |  |  |       return hash;
 | 
      
         | 2343 |  |  |  
 | 
      
         | 2344 |  |  |     case CONST_DOUBLE:
 | 
      
         | 2345 |  |  |       /* This is like the general case, except that it only counts
 | 
      
         | 2346 |  |  |          the integers representing the constant.  */
 | 
      
         | 2347 |  |  |       hash += (unsigned int) code + (unsigned int) GET_MODE (x);
 | 
      
         | 2348 |  |  |       if (GET_MODE (x) != VOIDmode)
 | 
      
         | 2349 |  |  |         hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
 | 
      
         | 2350 |  |  |       else
 | 
      
         | 2351 |  |  |         hash += ((unsigned int) CONST_DOUBLE_LOW (x)
 | 
      
         | 2352 |  |  |                  + (unsigned int) CONST_DOUBLE_HIGH (x));
 | 
      
         | 2353 |  |  |       return hash;
 | 
      
         | 2354 |  |  |  
 | 
      
         | 2355 |  |  |     case CONST_FIXED:
 | 
      
         | 2356 |  |  |       hash += (unsigned int) code + (unsigned int) GET_MODE (x);
 | 
      
         | 2357 |  |  |       hash += fixed_hash (CONST_FIXED_VALUE (x));
 | 
      
         | 2358 |  |  |       return hash;
 | 
      
         | 2359 |  |  |  
 | 
      
         | 2360 |  |  |     case CONST_VECTOR:
 | 
      
         | 2361 |  |  |       {
 | 
      
         | 2362 |  |  |         int units;
 | 
      
         | 2363 |  |  |         rtx elt;
 | 
      
         | 2364 |  |  |  
 | 
      
         | 2365 |  |  |         units = CONST_VECTOR_NUNITS (x);
 | 
      
         | 2366 |  |  |  
 | 
      
         | 2367 |  |  |         for (i = 0; i < units; ++i)
 | 
      
         | 2368 |  |  |           {
 | 
      
         | 2369 |  |  |             elt = CONST_VECTOR_ELT (x, i);
 | 
      
         | 2370 |  |  |             hash += hash_rtx_cb (elt, GET_MODE (elt),
 | 
      
         | 2371 |  |  |                                  do_not_record_p, hash_arg_in_memory_p,
 | 
      
         | 2372 |  |  |                                  have_reg_qty, cb);
 | 
      
         | 2373 |  |  |           }
 | 
      
         | 2374 |  |  |  
 | 
      
         | 2375 |  |  |         return hash;
 | 
      
         | 2376 |  |  |       }
 | 
      
         | 2377 |  |  |  
 | 
      
         | 2378 |  |  |       /* Assume there is only one rtx object for any given label.  */
 | 
      
         | 2379 |  |  |     case LABEL_REF:
 | 
      
         | 2380 |  |  |       /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
 | 
      
         | 2381 |  |  |          differences and differences between each stage's debugging dumps.  */
 | 
      
         | 2382 |  |  |          hash += (((unsigned int) LABEL_REF << 7)
 | 
      
         | 2383 |  |  |                   + CODE_LABEL_NUMBER (XEXP (x, 0)));
 | 
      
         | 2384 |  |  |       return hash;
 | 
      
         | 2385 |  |  |  
 | 
      
         | 2386 |  |  |     case SYMBOL_REF:
 | 
      
         | 2387 |  |  |       {
 | 
      
         | 2388 |  |  |         /* Don't hash on the symbol's address to avoid bootstrap differences.
 | 
      
         | 2389 |  |  |            Different hash values may cause expressions to be recorded in
 | 
      
         | 2390 |  |  |            different orders and thus different registers to be used in the
 | 
      
         | 2391 |  |  |            final assembler.  This also avoids differences in the dump files
 | 
      
         | 2392 |  |  |            between various stages.  */
 | 
      
         | 2393 |  |  |         unsigned int h = 0;
 | 
      
         | 2394 |  |  |         const unsigned char *p = (const unsigned char *) XSTR (x, 0);
 | 
      
         | 2395 |  |  |  
 | 
      
         | 2396 |  |  |         while (*p)
 | 
      
         | 2397 |  |  |           h += (h << 7) + *p++; /* ??? revisit */
 | 
      
         | 2398 |  |  |  
 | 
      
         | 2399 |  |  |         hash += ((unsigned int) SYMBOL_REF << 7) + h;
 | 
      
         | 2400 |  |  |         return hash;
 | 
      
         | 2401 |  |  |       }
 | 
      
         | 2402 |  |  |  
 | 
      
         | 2403 |  |  |     case MEM:
 | 
      
         | 2404 |  |  |       /* We don't record if marked volatile or if BLKmode since we don't
 | 
      
         | 2405 |  |  |          know the size of the move.  */
 | 
      
         | 2406 |  |  |       if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
 | 
      
         | 2407 |  |  |         {
 | 
      
         | 2408 |  |  |           *do_not_record_p = 1;
 | 
      
         | 2409 |  |  |           return 0;
 | 
      
         | 2410 |  |  |         }
 | 
      
         | 2411 |  |  |       if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
 | 
      
         | 2412 |  |  |         *hash_arg_in_memory_p = 1;
 | 
      
         | 2413 |  |  |  
 | 
      
         | 2414 |  |  |       /* Now that we have already found this special case,
 | 
      
         | 2415 |  |  |          might as well speed it up as much as possible.  */
 | 
      
         | 2416 |  |  |       hash += (unsigned) MEM;
 | 
      
         | 2417 |  |  |       x = XEXP (x, 0);
 | 
      
         | 2418 |  |  |       goto repeat;
 | 
      
         | 2419 |  |  |  
 | 
      
         | 2420 |  |  |     case USE:
 | 
      
         | 2421 |  |  |       /* A USE that mentions non-volatile memory needs special
 | 
      
         | 2422 |  |  |          handling since the MEM may be BLKmode which normally
 | 
      
         | 2423 |  |  |          prevents an entry from being made.  Pure calls are
 | 
      
         | 2424 |  |  |          marked by a USE which mentions BLKmode memory.
 | 
      
         | 2425 |  |  |          See calls.c:emit_call_1.  */
 | 
      
         | 2426 |  |  |       if (MEM_P (XEXP (x, 0))
 | 
      
         | 2427 |  |  |           && ! MEM_VOLATILE_P (XEXP (x, 0)))
 | 
      
         | 2428 |  |  |         {
 | 
      
         | 2429 |  |  |           hash += (unsigned) USE;
 | 
      
         | 2430 |  |  |           x = XEXP (x, 0);
 | 
      
         | 2431 |  |  |  
 | 
      
         | 2432 |  |  |           if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
 | 
      
         | 2433 |  |  |             *hash_arg_in_memory_p = 1;
 | 
      
         | 2434 |  |  |  
 | 
      
         | 2435 |  |  |           /* Now that we have already found this special case,
 | 
      
         | 2436 |  |  |              might as well speed it up as much as possible.  */
 | 
      
         | 2437 |  |  |           hash += (unsigned) MEM;
 | 
      
         | 2438 |  |  |           x = XEXP (x, 0);
 | 
      
         | 2439 |  |  |           goto repeat;
 | 
      
         | 2440 |  |  |         }
 | 
      
         | 2441 |  |  |       break;
 | 
      
         | 2442 |  |  |  
 | 
      
         | 2443 |  |  |     case PRE_DEC:
 | 
      
         | 2444 |  |  |     case PRE_INC:
 | 
      
         | 2445 |  |  |     case POST_DEC:
 | 
      
         | 2446 |  |  |     case POST_INC:
 | 
      
         | 2447 |  |  |     case PRE_MODIFY:
 | 
      
         | 2448 |  |  |     case POST_MODIFY:
 | 
      
         | 2449 |  |  |     case PC:
 | 
      
         | 2450 |  |  |     case CC0:
 | 
      
         | 2451 |  |  |     case CALL:
 | 
      
         | 2452 |  |  |     case UNSPEC_VOLATILE:
 | 
      
         | 2453 |  |  |       if (do_not_record_p) {
 | 
      
         | 2454 |  |  |         *do_not_record_p = 1;
 | 
      
         | 2455 |  |  |         return 0;
 | 
      
         | 2456 |  |  |       }
 | 
      
         | 2457 |  |  |       else
 | 
      
         | 2458 |  |  |         return hash;
 | 
      
         | 2459 |  |  |       break;
 | 
      
         | 2460 |  |  |  
 | 
      
         | 2461 |  |  |     case ASM_OPERANDS:
 | 
      
         | 2462 |  |  |       if (do_not_record_p && MEM_VOLATILE_P (x))
 | 
      
         | 2463 |  |  |         {
 | 
      
         | 2464 |  |  |           *do_not_record_p = 1;
 | 
      
         | 2465 |  |  |           return 0;
 | 
      
         | 2466 |  |  |         }
 | 
      
         | 2467 |  |  |       else
 | 
      
         | 2468 |  |  |         {
 | 
      
         | 2469 |  |  |           /* We don't want to take the filename and line into account.  */
 | 
      
	  hash += (unsigned) code + (unsigned) GET_MODE (x)
	    + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
	    + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

	  if (ASM_OPERANDS_INPUT_LENGTH (x))
	    {
	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
		{
		  hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
					GET_MODE (ASM_OPERANDS_INPUT (x, i)),
					do_not_record_p, hash_arg_in_memory_p,
					have_reg_qty, cb)
			   + hash_rtx_string
			   (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
		}

	      hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
	      x = ASM_OPERANDS_INPUT (x, 0);
	      mode = GET_MODE (x);
	      goto repeat;
	    }

	  return hash;
	}
      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  hash += (unsigned) code + (unsigned) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p,
			       hash_arg_in_memory_p,
			       have_reg_qty, cb);
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p,
				 hash_arg_in_memory_p,
				 have_reg_qty, cb);
	  break;

	case 's':
	  hash += hash_rtx_string (XSTR (x, i));
	  break;

	case 'i':
	  hash += (unsigned int) XINT (x, i);
	  break;

	case '0': case 't':
	  /* Unused.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  return hash;
}
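
/* Illustrative sketch, not used by the pass: the "change the last
   recursive call into iteration" idiom from the 'e' case above, shown
   on a plain singly linked list.  The node type and function below are
   hypothetical and exist only for this example.  */
#if 0
struct example_node { int value; struct example_node *next; };

static int
example_sum_nodes (struct example_node *n)
{
  int total = 0;

 repeat:
  if (n == 0)
    return total;

  total += n->value;

  /* Rather than "return total + example_sum_nodes (n->next);", reuse
     the current stack frame, just as hash_rtx_cb does with its
     "goto repeat" on the last 'e' operand.  */
  n = n->next;
  goto repeat;
}
#endif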

/* Hash an rtx.  We are careful to make sure the value is never negative.
   Equivalent registers hash identically.
   MODE is used in hashing for CONST_INTs only;
   otherwise the mode of X is used.

   Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.

   If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
   a MEM rtx which does not have the RTX_UNCHANGING_P bit set.

   Note that cse_insn knows that the hash code of a MEM expression
   is just (int) MEM plus the hash code of the address.  */

unsigned
hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
	  int *hash_arg_in_memory_p, bool have_reg_qty)
{
  return hash_rtx_cb (x, mode, do_not_record_p,
		      hash_arg_in_memory_p, have_reg_qty, NULL);
}
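
/* Illustrative sketch, not used by the pass: how a caller typically
   drives hash_rtx, checking the DO_NOT_RECORD_P flag before trusting
   the hash.  The helper name and its use of 0 as a "no hash" value are
   hypothetical.  */
#if 0
static unsigned
example_hash_for_table (rtx x)
{
  int dnr = 0, arg_in_memory = 0;
  unsigned hash = hash_rtx (x, GET_MODE (x), &dnr, &arg_in_memory, true);

  /* A volatile subexpression means X must not be entered in the table,
     so the hash value is meaningless to the caller.  */
  if (dnr)
    return 0;
  return hash;
}
#endif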

/* Hash an rtx X for cse via hash_rtx.
   Stores 1 in do_not_record if any subexpression is volatile.
   Stores 1 in hash_arg_in_memory if X contains a mem rtx which
   does not have the RTX_UNCHANGING_P bit set.  */

static inline unsigned
canon_hash (rtx x, enum machine_mode mode)
{
  return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
}

/* Like canon_hash but with no side effects, i.e. do_not_record
   and hash_arg_in_memory are not changed.  */

static inline unsigned
safe_hash (rtx x, enum machine_mode mode)
{
  int dummy_do_not_record;
  return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
}
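
/* Illustrative sketch, not used by the pass: the practical difference
   between the two wrappers above.  canon_hash records volatility and
   memory references in the pass-wide do_not_record and
   hash_arg_in_memory flags; safe_hash throws that information away, so
   it suits read-only probes.  The helper name is hypothetical.  */
#if 0
static unsigned
example_probe_hash (rtx x)
{
  /* Use safe_hash when the global flags inspected later by cse_insn
     must not be disturbed.  */
  return safe_hash (x, GET_MODE (x));
}
#endif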

/* Return 1 iff X and Y would canonicalize into the same thing,
   without actually constructing the canonicalization of either one.
   If VALIDATE is nonzero,
   we assume X is an expression being processed from the rtl
   and Y was found in the hash table.  We check register refs
   in Y for being marked as valid.

   If FOR_GCSE is true, we compare X and Y for equivalence for GCSE.  */

int
exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* Note: it is incorrect to assume an expression is equivalent to itself
     if VALIDATE is nonzero.  */
  if (x == y && !validate)
    return 1;

  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address spaces are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
      return x == y;

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      if (for_gcse)
	return REGNO (x) == REGNO (y);
      else
	{
	  unsigned int regno = REGNO (y);
	  unsigned int i;
	  unsigned int endregno = END_REGNO (y);

	  /* If the quantities are not the same, the expressions are not
	     equivalent.  If they are and we are not to validate, they
	     are equivalent.  Otherwise, ensure all regs are up-to-date.  */

	  if (REG_QTY (REGNO (x)) != REG_QTY (regno))
	    return 0;

	  if (! validate)
	    return 1;

	  for (i = regno; i < endregno; i++)
	    if (REG_IN_TABLE (i) != REG_TICK (i))
	      return 0;

	  return 1;
	}

    case MEM:
      if (for_gcse)
	{
	  /* A volatile mem should not be considered equivalent to any
	     other.  */
	  if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	    return 0;

	  /* Can't merge two expressions in different alias sets, since we
	     can decide that the expression is transparent in a block when
	     it isn't, due to it being set with the different alias set.

	     Also, can't merge two expressions with different MEM_ATTRS.
	     They could e.g. be two different entities allocated into the
	     same space on the stack (see e.g. PR25130).  In that case, the
	     MEM addresses can be the same, even though the two MEMs are
	     absolutely not equivalent.

	     But because really all MEM attributes should be the same for
	     equivalent MEMs, we just use the invariant that MEMs that have
	     the same attributes share the same mem_attrs data structure.  */
	  if (MEM_ATTRS (x) != MEM_ATTRS (y))
	    return 0;
	}
      break;

    /*  For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
			     validate, for_gcse)
	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
				validate, for_gcse))
	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
				validate, for_gcse)
		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
				   validate, for_gcse)));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
	 disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      if (GET_MODE (x) != GET_MODE (y)
	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
	return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
	{
	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
			       ASM_OPERANDS_INPUT (y, i),
			       validate, for_gcse)
		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
	      return 0;
	}

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fails to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
			      validate, for_gcse))
	    return 0;
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
				validate, for_gcse))
	      return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case '0':
	case 't':
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  return 1;
}
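
/* Illustrative sketch, not used by the pass: the typical way
   exp_equiv_p is invoked when probing the hash table.  X comes from
   the insn stream and ELT_EXP from a table entry, so VALIDATE is 1;
   FOR_GCSE is false inside cse proper.  The helper name is
   hypothetical.  */
#if 0
static int
example_table_hit_p (rtx x, rtx elt_exp)
{
  /* Registers in ELT_EXP must still be marked valid (REG_IN_TABLE
     equal to REG_TICK), which is exactly what VALIDATE != 0 checks.  */
  return exp_equiv_p (x, elt_exp, 1, false);
}
#endif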

/* Subroutine of canon_reg.  Pass *XLOC through canon_reg, and validate
   the result if necessary.  INSN is as for canon_reg.  */

static void
validate_canon_reg (rtx *xloc, rtx insn)
{
  if (*xloc)
    {
      rtx new_rtx = canon_reg (*xloc, insn);

      /* If replacing pseudo with hard reg or vice versa, ensure the
         insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
      gcc_assert (insn && new_rtx);
      validate_change (insn, xloc, new_rtx, 1);
    }
}

/* Canonicalize an expression:
   replace each register reference inside it
   with the "oldest" equivalent register.

   If INSN is nonzero validate_change is used to ensure that INSN remains valid
   after we make our substitution.  The calls are made with IN_GROUP nonzero
   so apply_change_group must be called upon the outermost return from this
   function (unless INSN is zero).  The result of apply_change_group can
   generally be discarded since the changes we are making are optional.  */

static rtx
canon_reg (rtx x, rtx insn)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return x;

    case REG:
      {
	int first;
	int q;
	struct qty_table_elem *ent;

	/* Never replace a hard reg, because hard regs can appear
	   in more than one machine mode, and we must preserve the mode
	   of each occurrence.  Also, some hard regs appear in
	   MEMs that are shared and mustn't be altered.  Don't try to
	   replace any reg that maps to a reg of class NO_REGS.  */
	if (REGNO (x) < FIRST_PSEUDO_REGISTER
	    || ! REGNO_QTY_VALID_P (REGNO (x)))
	  return x;

	q = REG_QTY (REGNO (x));
	ent = &qty_table[q];
	first = ent->first_reg;
	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
		: REGNO_REG_CLASS (first) == NO_REGS ? x
		: gen_rtx_REG (ent->mode, first));
      }

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      if (fmt[i] == 'e')
	validate_canon_reg (&XEXP (x, i), insn);
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  validate_canon_reg (&XVECEXP (x, i, j), insn);
    }

  return x;
}
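
/* Illustrative sketch, not used by the pass: the calling pattern the
   comment above canon_reg describes.  With INSN nonzero, every
   replacement is queued via validate_change with IN_GROUP nonzero, so
   the outermost caller must finish with apply_change_group.  The
   wrapper name is hypothetical; PATTERN and apply_change_group are the
   usual rtl.h/recog.c entry points.  */
#if 0
static void
example_canonicalize_insn (rtx insn)
{
  canon_reg (PATTERN (insn), insn);

  /* Either the whole group of replacements is accepted or it is thrown
     away; failure is harmless because the changes are only an
     optimization.  */
  apply_change_group ();
}
#endif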

/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table to
   see what values are being compared.

   *PARG1 and *PARG2 are updated to contain the rtx representing the values
   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
   compared to produce cc0.

   The return value is the comparison operator to use with the new *PARG1
   and *PARG2: either the original CODE or the code of an equivalent
   (possibly inverted) comparison found via the hash table.  */

static enum rtx_code
find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
		      enum machine_mode *pmode1, enum machine_mode *pmode2)
{
  rtx arg1, arg2;

  arg1 = *parg1, arg2 = *parg2;

  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */

  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
    {
      /* Set nonzero when we find something of interest.  */
      rtx x = 0;
      int reverse_code = 0;
      struct table_elt *p = 0;

      /* If arg1 is a COMPARE, extract the comparison arguments from it.
	 On machines with CC0, this is the only case that can occur, since
	 fold_rtx will return the COMPARE or item being compared with zero
	 when given CC0.  */

      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
	x = arg1;

      /* If ARG1 is a comparison operator and CODE is testing for
	 STORE_FLAG_VALUE, get the inner arguments.  */

      else if (COMPARISON_P (arg1))
	{
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  if (code == NE
	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		  && code == LT && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
	      || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
		      REAL_VALUE_NEGATIVE (fsfv)))
#endif
	      )
	    x = arg1;
	  else if (code == EQ
		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		       && code == GE && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			   REAL_VALUE_NEGATIVE (fsfv)))
#endif
		   )
	    x = arg1, reverse_code = 1;
	}

      /* ??? We could also check for

	 (ne (and (eq (...) (const_int 1))) (const_int 0))

	 and related forms, but let's wait until we see them occurring.  */

      if (x == 0)
	/* Look up ARG1 in the hash table and see if it has an equivalence
	   that lets us see what is being compared.  */
	p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
      if (p)
	{
	  p = p->first_same_value;

	  /* If what we compare is already known to be constant, that is as
	     good as it gets.
	     We need to break the loop in this case, because otherwise we
	     can have an infinite loop when looking at a reg that is known
	     to be a constant which is the same as a comparison of a reg
	     against zero which appears later in the insn stream, which in
	     turn is constant and the same as the comparison of the first reg
	     against zero...  */
	  if (p->is_const)
	    break;
	}

      for (; p; p = p->next_same_value)
	{
	  enum machine_mode inner_mode = GET_MODE (p->exp);
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  /* If the entry isn't valid, skip it.  */
	  if (! exp_equiv_p (p->exp, p->exp, 1, false))
	    continue;

	  /* If it's the same comparison we're already looking at, skip it.  */
	  if (COMPARISON_P (p->exp)
	      && XEXP (p->exp, 0) == arg1
	      && XEXP (p->exp, 1) == arg2)
	    continue;

	  if (GET_CODE (p->exp) == COMPARE
	      /* Another possibility is that this machine has a compare insn
		 that includes the comparison code.  In that case, ARG1 would
		 be equivalent to a comparison operation that would set ARG1 to
		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
		 ORIG_CODE is the actual comparison being done; if it is an EQ,
		 we must reverse ORIG_CODE.  On machines with a negative value
		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
	      || ((code == NE
		   || (code == LT
		       && val_signbit_known_set_p (inner_mode,
						   STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (code == LT
		       && SCALAR_FLOAT_MODE_P (inner_mode)
		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			   REAL_VALUE_NEGATIVE (fsfv)))
#endif
		   )
		  && COMPARISON_P (p->exp)))
	    {
	      x = p->exp;
	      break;
	    }
	  else if ((code == EQ
		    || (code == GE
			&& val_signbit_known_set_p (inner_mode,
						    STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
		    || (code == GE
			&& SCALAR_FLOAT_MODE_P (inner_mode)
			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			    REAL_VALUE_NEGATIVE (fsfv)))
#endif
		    )
		   && COMPARISON_P (p->exp))
	    {
	      reverse_code = 1;
	      x = p->exp;
	      break;
	    }

	  /* If this is a non-trapping address, e.g. fp + constant, the
	     equivalent is a better operand since it may let us predict
	     the value of the comparison.  */
	  else if (!rtx_addr_can_trap_p (p->exp))
	    {
	      arg1 = p->exp;
	      continue;
	    }
	}

      /* If we didn't find a useful equivalence for ARG1, we are done.
	 Otherwise, set up for the next iteration.  */
      if (x == 0)
	break;

      /* If we need to reverse the comparison, make sure that is
	 possible -- we can't necessarily infer the value of GE from LT
	 with floating-point operands.  */
      if (reverse_code)
	{
	  enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
	  if (reversed == UNKNOWN)
	    break;
	  else
	    code = reversed;
	}
      else if (COMPARISON_P (x))
	code = GET_CODE (x);
      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
    }

  /* Return our results.  Return the modes from before fold_rtx
     because fold_rtx might produce const_int, and then it's too late.  */
  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);

  return code;
}
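
/* Illustrative sketch, not used by the pass: how a caller unwraps a
   comparison with find_comparison_args.  The operand and mode slots
   are rewritten in place; the possibly replaced or reversed comparison
   code is the return value.  The wrapper name is hypothetical.  */
#if 0
static enum rtx_code
example_real_comparison (enum rtx_code code, rtx *op0, rtx *op1)
{
  enum machine_mode mode0, mode1;

  code = find_comparison_args (code, op0, op1, &mode0, &mode1);

  /* *op0 and *op1 now hold the values actually being compared, and
     MODE0/MODE1 their modes as recorded before fold_rtx could turn
     them into VOIDmode constants.  */
  return code;
}
#endif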
 | 
      
         | 3081 |  |  |  
 | 
      
         | 3082 |  |  | /* If X is a nontrivial arithmetic operation on an argument for which
 | 
      
         | 3083 |  |  |    a constant value can be determined, return the result of operating
 | 
      
         | 3084 |  |  |    on that value, as a constant.  Otherwise, return X, possibly with
 | 
      
         | 3085 |  |  |    one or more operands changed to a forward-propagated constant.
 | 
      
         | 3086 |  |  |  
 | 
      
         | 3087 |  |  |    If X is a register whose contents are known, we do NOT return
 | 
      
         | 3088 |  |  |    those contents here; equiv_constant is called to perform that task.
 | 
      
         | 3089 |  |  |    For SUBREGs and MEMs, we do that both here and in equiv_constant.
 | 
      
         | 3090 |  |  |  
 | 
      
         | 3091 |  |  |    INSN is the insn that we may be modifying.  If it is 0, make a copy
 | 
      
         | 3092 |  |  |    of X before modifying it.  */
 | 
      
         | 3093 |  |  |  
 | 
      
         | 3094 |  |  | static rtx
 | 
      
         | 3095 |  |  | fold_rtx (rtx x, rtx insn)
 | 
      
         | 3096 |  |  | {
 | 
      
         | 3097 |  |  |   enum rtx_code code;
 | 
      
         | 3098 |  |  |   enum machine_mode mode;
 | 
      
         | 3099 |  |  |   const char *fmt;
 | 
      
         | 3100 |  |  |   int i;
 | 
      
         | 3101 |  |  |   rtx new_rtx = 0;
 | 
      
         | 3102 |  |  |   int changed = 0;
 | 
      
         | 3103 |  |  |  
 | 
      
         | 3104 |  |  |   /* Operands of X.  */
 | 
      
         | 3105 |  |  |   rtx folded_arg0;
 | 
      
         | 3106 |  |  |   rtx folded_arg1;
 | 
      
         | 3107 |  |  |  
 | 
      
         | 3108 |  |  |   /* Constant equivalents of first three operands of X;
 | 
      
         | 3109 |  |  |  
 | 
      
         | 3110 |  |  |   rtx const_arg0;
 | 
      
         | 3111 |  |  |   rtx const_arg1;
 | 
      
         | 3112 |  |  |   rtx const_arg2;
 | 
      
         | 3113 |  |  |  
 | 
      
         | 3114 |  |  |   /* The mode of the first operand of X.  We need this for sign and zero
 | 
      
         | 3115 |  |  |      extends.  */
 | 
      
         | 3116 |  |  |   enum machine_mode mode_arg0;
 | 
      
         | 3117 |  |  |  
 | 
      
         | 3118 |  |  |   if (x == 0)
 | 
      
         | 3119 |  |  |     return x;
 | 
      
         | 3120 |  |  |  
 | 
      
         | 3121 |  |  |   /* Try to perform some initial simplifications on X.  */
 | 
      
         | 3122 |  |  |   code = GET_CODE (x);
 | 
      
         | 3123 |  |  |   switch (code)
 | 
      
         | 3124 |  |  |     {
 | 
      
         | 3125 |  |  |     case MEM:
 | 
      
         | 3126 |  |  |     case SUBREG:
 | 
      
         | 3127 |  |  |       if ((new_rtx = equiv_constant (x)) != NULL_RTX)
 | 
      
         | 3128 |  |  |         return new_rtx;
 | 
      
         | 3129 |  |  |       return x;
 | 
      
         | 3130 |  |  |  
 | 
      
         | 3131 |  |  |     case CONST:
 | 
      
         | 3132 |  |  |     case CONST_INT:
 | 
      
         | 3133 |  |  |     case CONST_DOUBLE:
 | 
      
         | 3134 |  |  |     case CONST_FIXED:
 | 
      
         | 3135 |  |  |     case CONST_VECTOR:
 | 
      
         | 3136 |  |  |     case SYMBOL_REF:
 | 
      
         | 3137 |  |  |     case LABEL_REF:
 | 
      
         | 3138 |  |  |     case REG:
 | 
      
         | 3139 |  |  |     case PC:
 | 
      
         | 3140 |  |  |       /* No use simplifying an EXPR_LIST
 | 
      
         | 3141 |  |  |          since they are used only for lists of args
 | 
      
         | 3142 |  |  |          in a function call's REG_EQUAL note.  */
 | 
      
         | 3143 |  |  |     case EXPR_LIST:
 | 
      
         | 3144 |  |  |       return x;
 | 
      
         | 3145 |  |  |  
 | 
      
         | 3146 |  |  | #ifdef HAVE_cc0
 | 
      
         | 3147 |  |  |     case CC0:
 | 
      
         | 3148 |  |  |       return prev_insn_cc0;
 | 
      
         | 3149 |  |  | #endif
 | 
      
         | 3150 |  |  |  
 | 
      
         | 3151 |  |  |     case ASM_OPERANDS:
 | 
      
         | 3152 |  |  |       if (insn)
 | 
      
         | 3153 |  |  |         {
 | 
      
         | 3154 |  |  |           for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
 | 
      
         | 3155 |  |  |             validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
 | 
      
         | 3156 |  |  |                              fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
 | 
      
         | 3157 |  |  |         }
 | 
      
         | 3158 |  |  |       return x;
 | 
      
         | 3159 |  |  |  
 | 
      
         | 3160 |  |  | #ifdef NO_FUNCTION_CSE
 | 
      
         | 3161 |  |  |     case CALL:
 | 
      
         | 3162 |  |  |       if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
 | 
      
         | 3163 |  |  |         return x;
 | 
      
         | 3164 |  |  |       break;
 | 
      
         | 3165 |  |  | #endif
 | 
      
         | 3166 |  |  |  
 | 
      
         | 3167 |  |  |     /* Anything else goes through the loop below.  */
 | 
      
         | 3168 |  |  |     default:
 | 
      
         | 3169 |  |  |       break;
 | 
      
         | 3170 |  |  |     }
 | 
      
         | 3171 |  |  |  
 | 
      
         | 3172 |  |  |   mode = GET_MODE (x);
 | 
      
         | 3173 |  |  |   const_arg0 = 0;
 | 
      
         | 3174 |  |  |   const_arg1 = 0;
 | 
      
         | 3175 |  |  |   const_arg2 = 0;
 | 
      
         | 3176 |  |  |   mode_arg0 = VOIDmode;
 | 
      
         | 3177 |  |  |  
 | 
      
         | 3178 |  |  |   /* Try folding our operands.
 | 
      
         | 3179 |  |  |      Then see which ones have constant values known.  */
 | 
      
         | 3180 |  |  |  
 | 
      
         | 3181 |  |  |   fmt = GET_RTX_FORMAT (code);
 | 
      
         | 3182 |  |  |   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
 | 
      
         | 3183 |  |  |     if (fmt[i] == 'e')
 | 
      
         | 3184 |  |  |       {
 | 
      
         | 3185 |  |  |         rtx folded_arg = XEXP (x, i), const_arg;
 | 
      
         | 3186 |  |  |         enum machine_mode mode_arg = GET_MODE (folded_arg);
 | 
      
         | 3187 |  |  |  
 | 
      
         | 3188 |  |  |         switch (GET_CODE (folded_arg))
 | 
      
         | 3189 |  |  |           {
 | 
      
         | 3190 |  |  |           case MEM:
 | 
      
         | 3191 |  |  |           case REG:
 | 
      
         | 3192 |  |  |           case SUBREG:
 | 
      
         | 3193 |  |  |             const_arg = equiv_constant (folded_arg);
 | 
      
         | 3194 |  |  |             break;
 | 
      
         | 3195 |  |  |  
 | 
      
         | 3196 |  |  |           case CONST:
 | 
      
         | 3197 |  |  |           case CONST_INT:
 | 
      
         | 3198 |  |  |           case SYMBOL_REF:
 | 
      
         | 3199 |  |  |           case LABEL_REF:
 | 
      
         | 3200 |  |  |           case CONST_DOUBLE:
 | 
      
         | 3201 |  |  |           case CONST_FIXED:
 | 
      
         | 3202 |  |  |           case CONST_VECTOR:
 | 
      
         | 3203 |  |  |             const_arg = folded_arg;
 | 
      
         | 3204 |  |  |             break;
 | 
      
         | 3205 |  |  |  
 | 
      
         | 3206 |  |  | #ifdef HAVE_cc0
 | 
      
         | 3207 |  |  |           case CC0:
 | 
      
         | 3208 |  |  |             folded_arg = prev_insn_cc0;
 | 
      
         | 3209 |  |  |             mode_arg = prev_insn_cc0_mode;
 | 
      
         | 3210 |  |  |             const_arg = equiv_constant (folded_arg);
 | 
      
         | 3211 |  |  |             break;
 | 
      
         | 3212 |  |  | #endif
 | 
      
         | 3213 |  |  |  
 | 
      
         | 3214 |  |  |           default:
 | 
      
         | 3215 |  |  |             folded_arg = fold_rtx (folded_arg, insn);
 | 
      
         | 3216 |  |  |             const_arg = equiv_constant (folded_arg);
 | 
      
         | 3217 |  |  |             break;
 | 
      
         | 3218 |  |  |           }
 | 
      
         | 3219 |  |  |  
 | 
      
         | 3220 |  |  |         /* For the first three operands, see if the operand
 | 
      
         | 3221 |  |  |            is constant or equivalent to a constant.  */
 | 
      
         | 3222 |  |  |         switch (i)
 | 
      
         | 3223 |  |  |           {
 | 
      
         | 3224 |  |  |           case 0:
 | 
      
         | 3225 |  |  |             folded_arg0 = folded_arg;
 | 
      
         | 3226 |  |  |             const_arg0 = const_arg;
 | 
      
         | 3227 |  |  |             mode_arg0 = mode_arg;
 | 
      
         | 3228 |  |  |             break;
 | 
      
         | 3229 |  |  |           case 1:
 | 
      
         | 3230 |  |  |             folded_arg1 = folded_arg;
 | 
      
         | 3231 |  |  |             const_arg1 = const_arg;
 | 
      
         | 3232 |  |  |             break;
 | 
      
         | 3233 |  |  |           case 2:
 | 
      
         | 3234 |  |  |             const_arg2 = const_arg;
 | 
      
         | 3235 |  |  |             break;
 | 
      
         | 3236 |  |  |           }
 | 
      
         | 3237 |  |  |  
 | 
      
         | 3238 |  |  |         /* Pick the least expensive of the argument and an equivalent constant
 | 
      
         | 3239 |  |  |            argument.  */
 | 
      
         | 3240 |  |  |         if (const_arg != 0
 | 
      
         | 3241 |  |  |             && const_arg != folded_arg
 | 
      
         | 3242 |  |  |             && COST_IN (const_arg, code, i) <= COST_IN (folded_arg, code, i)
 | 
      
         | 3243 |  |  |  
 | 
      
         | 3244 |  |  |             /* It's not safe to substitute the operand of a conversion
 | 
      
         | 3245 |  |  |                operator with a constant, as the conversion's identity
 | 
      
         | 3246 |  |  |                depends upon the mode of its operand.  This optimization
 | 
      
         | 3247 |  |  |                is handled by the call to simplify_unary_operation.  */
 | 
      
         | 3248 |  |  |             && (GET_RTX_CLASS (code) != RTX_UNARY
 | 
      
         | 3249 |  |  |                 || GET_MODE (const_arg) == mode_arg0
 | 
      
         | 3250 |  |  |                 || (code != ZERO_EXTEND
 | 
      
         | 3251 |  |  |                     && code != SIGN_EXTEND
 | 
      
         | 3252 |  |  |                     && code != TRUNCATE
 | 
      
         | 3253 |  |  |                     && code != FLOAT_TRUNCATE
 | 
      
         | 3254 |  |  |                     && code != FLOAT_EXTEND
 | 
      
         | 3255 |  |  |                     && code != FLOAT
 | 
      
         | 3256 |  |  |                     && code != FIX
 | 
      
         | 3257 |  |  |                     && code != UNSIGNED_FLOAT
 | 
      
         | 3258 |  |  |                     && code != UNSIGNED_FIX)))
 | 
      
         | 3259 |  |  |           folded_arg = const_arg;
 | 
      
         | 3260 |  |  |  
 | 
      
         | 3261 |  |  |         if (folded_arg == XEXP (x, i))
 | 
      
         | 3262 |  |  |           continue;
 | 
      
         | 3263 |  |  |  
 | 
      
         | 3264 |  |  |         if (insn == NULL_RTX && !changed)
 | 
      
         | 3265 |  |  |           x = copy_rtx (x);
 | 
      
         | 3266 |  |  |         changed = 1;
 | 
      
         | 3267 |  |  |         validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
 | 
      
         | 3268 |  |  |       }
 | 
      
         | 3269 |  |  |  
 | 
      
         | 3270 |  |  |   if (changed)
 | 
      
         | 3271 |  |  |     {
 | 
      
         | 3272 |  |  |       /* Canonicalize X if necessary, and keep const_argN and folded_argN
 | 
      
         | 3273 |  |  |          consistent with the order in X.  */
 | 
      
         | 3274 |  |  |       if (canonicalize_change_group (insn, x))
 | 
      
         | 3275 |  |  |         {
 | 
      
         | 3276 |  |  |           rtx tem;
 | 
      
         | 3277 |  |  |           tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
 | 
      
         | 3278 |  |  |           tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
 | 
      
         | 3279 |  |  |         }
 | 
      
         | 3280 |  |  |  
 | 
      
         | 3281 |  |  |       apply_change_group ();
 | 
      
         | 3282 |  |  |     }
 | 
      
         | 3283 |  |  |  
 | 
      
         | 3284 |  |  |   /* If X is an arithmetic operation, see if we can simplify it.  */
 | 
      
         | 3285 |  |  |  
 | 
      
         | 3286 |  |  |   switch (GET_RTX_CLASS (code))
 | 
      
         | 3287 |  |  |     {
 | 
      
         | 3288 |  |  |     case RTX_UNARY:
 | 
      
         | 3289 |  |  |       {
 | 
      
         | 3290 |  |  |         /* We can't simplify extension ops unless we know the
 | 
      
         | 3291 |  |  |            original mode.  */
 | 
      
         | 3292 |  |  |         if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
 | 
      
         | 3293 |  |  |             && mode_arg0 == VOIDmode)
 | 
      
         | 3294 |  |  |           break;
 | 
      
         | 3295 |  |  |  
 | 
      
         | 3296 |  |  |         new_rtx = simplify_unary_operation (code, mode,
 | 
      
         | 3297 |  |  |                                         const_arg0 ? const_arg0 : folded_arg0,
 | 
      
         | 3298 |  |  |                                         mode_arg0);
 | 
      
         | 3299 |  |  |       }
 | 
      
         | 3300 |  |  |       break;
 | 
      
         | 3301 |  |  |  
 | 
      
         | 3302 |  |  |     case RTX_COMPARE:
 | 
      
         | 3303 |  |  |     case RTX_COMM_COMPARE:
 | 
      
         | 3304 |  |  |       /* See what items are actually being compared and set FOLDED_ARG[01]
 | 
      
         | 3305 |  |  |          to those values and CODE to the actual comparison code.  If any are
 | 
      
         | 3306 |  |  |          constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
 | 
      
         | 3307 |  |  |          do anything if both operands are already known to be constant.  */
 | 
      
         | 3308 |  |  |  
 | 
      
         | 3309 |  |  |       /* ??? Vector mode comparisons are not supported yet.  */
 | 
      
         | 3310 |  |  |       if (VECTOR_MODE_P (mode))
 | 
      
         | 3311 |  |  |         break;
 | 
      
         | 3312 |  |  |  
 | 
      
         | 3313 |  |  |       if (const_arg0 == 0 || const_arg1 == 0)
 | 
      
         | 3314 |  |  |         {
 | 
      
         | 3315 |  |  |           struct table_elt *p0, *p1;
 | 
      
         | 3316 |  |  |           rtx true_rtx, false_rtx;
 | 
      
         | 3317 |  |  |           enum machine_mode mode_arg1;
 | 
      
         | 3318 |  |  |  
 | 
      
         | 3319 |  |  |           if (SCALAR_FLOAT_MODE_P (mode))
 | 
      
         | 3320 |  |  |             {
 | 
      
         | 3321 |  |  | #ifdef FLOAT_STORE_FLAG_VALUE
 | 
      
         | 3322 |  |  |               true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
 | 
      
         | 3323 |  |  |                           (FLOAT_STORE_FLAG_VALUE (mode), mode));
 | 
      
         | 3324 |  |  | #else
 | 
      
         | 3325 |  |  |               true_rtx = NULL_RTX;
 | 
      
         | 3326 |  |  | #endif
 | 
      
         | 3327 |  |  |               false_rtx = CONST0_RTX (mode);
 | 
      
         | 3328 |  |  |             }
 | 
      
         | 3329 |  |  |           else
 | 
      
         | 3330 |  |  |             {
 | 
      
              true_rtx = const_true_rtx;
              false_rtx = const0_rtx;
            }

          code = find_comparison_args (code, &folded_arg0, &folded_arg1,
                                       &mode_arg0, &mode_arg1);

          /* If the mode is VOIDmode or a MODE_CC mode, we don't know
             what kinds of things are being compared, so we can't do
             anything with this comparison.  */

          if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
            break;

          const_arg0 = equiv_constant (folded_arg0);
          const_arg1 = equiv_constant (folded_arg1);

          /* If we do not now have two constants being compared, see
             if we can nevertheless deduce some things about the
             comparison.  */
          if (const_arg0 == 0 || const_arg1 == 0)
            {
              if (const_arg1 != NULL)
                {
                  rtx cheapest_simplification;
                  int cheapest_cost;
                  rtx simp_result;
                  struct table_elt *p;

                  /* See if we can find an equivalent of folded_arg0
                     that gets us a cheaper expression, possibly a
                     constant through simplifications.  */
                  p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
                              mode_arg0);

                  if (p != NULL)
                    {
                      cheapest_simplification = x;
                      cheapest_cost = COST (x);

                      for (p = p->first_same_value; p != NULL; p = p->next_same_value)
                        {
                          int cost;

                          /* If the entry isn't valid, skip it.  */
                          if (! exp_equiv_p (p->exp, p->exp, 1, false))
                            continue;

                          /* Try to simplify using this equivalence.  */
                          simp_result
                            = simplify_relational_operation (code, mode,
                                                             mode_arg0,
                                                             p->exp,
                                                             const_arg1);

                          if (simp_result == NULL)
                            continue;

                          cost = COST (simp_result);
                          if (cost < cheapest_cost)
                            {
                              cheapest_cost = cost;
                              cheapest_simplification = simp_result;
                            }
                        }

                      /* If we have a cheaper expression now, use that
                         and try folding it further, from the top.  */
                      if (cheapest_simplification != x)
                        return fold_rtx (copy_rtx (cheapest_simplification),
                                         insn);
                    }
                }

              /* See if the two operands are the same.  */

              if ((REG_P (folded_arg0)
                   && REG_P (folded_arg1)
                   && (REG_QTY (REGNO (folded_arg0))
                       == REG_QTY (REGNO (folded_arg1))))
                  || ((p0 = lookup (folded_arg0,
                                    SAFE_HASH (folded_arg0, mode_arg0),
                                    mode_arg0))
                      && (p1 = lookup (folded_arg1,
                                       SAFE_HASH (folded_arg1, mode_arg0),
                                       mode_arg0))
                      && p0->first_same_value == p1->first_same_value))
                folded_arg1 = folded_arg0;

              /* If FOLDED_ARG0 is a register, see if the comparison we are
                 doing now is either the same as we did before or the reverse
                 (we only check the reverse if not floating-point).  */
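              /* For example, if an earlier branch recorded that
                 (gt (reg R) (const_int 4)) is known true, a later GT test of
                 that register against 4 can fold to TRUE_RTX here, and (when
                 MODE_ARG0 is not floating-point) the reversed LE test can
                 fold to FALSE_RTX.  R stands for any register.  */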
              else if (REG_P (folded_arg0))
                {
                  int qty = REG_QTY (REGNO (folded_arg0));

                  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
                    {
                      struct qty_table_elem *ent = &qty_table[qty];

                      if ((comparison_dominates_p (ent->comparison_code, code)
                           || (! FLOAT_MODE_P (mode_arg0)
                               && comparison_dominates_p (ent->comparison_code,
                                                          reverse_condition (code))))
                          && (rtx_equal_p (ent->comparison_const, folded_arg1)
                              || (const_arg1
                                  && rtx_equal_p (ent->comparison_const,
                                                  const_arg1))
                              || (REG_P (folded_arg1)
                                  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
                        {
                          if (comparison_dominates_p (ent->comparison_code, code))
                            {
                              if (true_rtx)
                                return true_rtx;
                              else
                                break;
                            }
                          else
                            return false_rtx;
                        }
                    }
                }
            }
        }

      /* If we are comparing against zero, see if the first operand is
         equivalent to an IOR with a constant.  If so, we may be able to
         determine the result of this comparison.  */
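      /* For instance, if FOLDED_ARG0 is known equivalent to
         (ior (reg R) (const_int 4)), bit 2 of its value is always set, so an
         EQ test against zero may fold to false and an NE test to true once
         the IOR form is substituted below.  */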
      if (const_arg1 == const0_rtx && !const_arg0)
        {
          rtx y = lookup_as_function (folded_arg0, IOR);
          rtx inner_const;

          if (y != 0
              && (inner_const = equiv_constant (XEXP (y, 1))) != 0
              && CONST_INT_P (inner_const)
              && INTVAL (inner_const) != 0)
            folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
        }

      {
        rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
        rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
        new_rtx = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
      }
      break;

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      switch (code)
        {
        case PLUS:
          /* If the second operand is a LABEL_REF, see if the first is a MINUS
             with that LABEL_REF as its second operand.  If so, the result is
             the first operand of that MINUS.  This handles switches with an
             ADDR_DIFF_VEC table.  */
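          /* For example, (plus (minus X (label_ref L)) (label_ref L))
             can fold to just X.  */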
          if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
            {
              rtx y
                = GET_CODE (folded_arg0) == MINUS ? folded_arg0
                : lookup_as_function (folded_arg0, MINUS);

              if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
                  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
                return XEXP (y, 0);

              /* Now try for a CONST of a MINUS like the above.  */
              if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
                        : lookup_as_function (folded_arg0, CONST))) != 0
                  && GET_CODE (XEXP (y, 0)) == MINUS
                  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
                  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
                return XEXP (XEXP (y, 0), 0);
            }

          /* Likewise if the operands are in the other order.  */
          if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
            {
              rtx y
                = GET_CODE (folded_arg1) == MINUS ? folded_arg1
                : lookup_as_function (folded_arg1, MINUS);

              if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
                  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
                return XEXP (y, 0);

              /* Now try for a CONST of a MINUS like the above.  */
              if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
                        : lookup_as_function (folded_arg1, CONST))) != 0
                  && GET_CODE (XEXP (y, 0)) == MINUS
                  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
                  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
                return XEXP (XEXP (y, 0), 0);
            }

          /* If second operand is a register equivalent to a negative
             CONST_INT, see if we can find a register equivalent to the
             positive constant.  Make a MINUS if so.  Don't do this for
             a non-negative constant since we might then alternate between
             choosing positive and negative constants.  Having the positive
             constant previously-used is the more common case.  Be sure
             the resulting constant is non-negative; if const_arg1 were
             the smallest negative number this would overflow: depending
             on the mode, this would either just be the same value (and
             hence not save anything) or be incorrect.  */
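          /* For instance, given (plus X R1) where R1 is known to hold
             (const_int -4), if some other register R2 is known to hold
             (const_int 4) we may return (minus X R2) instead.  */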
          if (const_arg1 != 0 && CONST_INT_P (const_arg1)
              && INTVAL (const_arg1) < 0
              /* This used to test

                 -INTVAL (const_arg1) >= 0

                 But the Sun V5.0 compilers mis-compiled that test.  So
                 instead we test for the problematic value in a more direct
                 manner and hope the Sun compilers get it correct.  */
              && INTVAL (const_arg1) !=
                ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
              && REG_P (folded_arg1))
            {
              rtx new_const = GEN_INT (-INTVAL (const_arg1));
              struct table_elt *p
                = lookup (new_const, SAFE_HASH (new_const, mode), mode);

              if (p)
                for (p = p->first_same_value; p; p = p->next_same_value)
                  if (REG_P (p->exp))
                    return simplify_gen_binary (MINUS, mode, folded_arg0,
                                                canon_reg (p->exp, NULL_RTX));
            }
          goto from_plus;

        case MINUS:
          /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
             If so, produce (PLUS Z C2-C).  */
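          /* E.g. if Y is known to be (plus Z (const_int 8)), then
             (minus Y (const_int 3)) may fold to (plus Z (const_int 5)).  */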
          if (const_arg1 != 0 && CONST_INT_P (const_arg1))
            {
              rtx y = lookup_as_function (XEXP (x, 0), PLUS);
              if (y && CONST_INT_P (XEXP (y, 1)))
                return fold_rtx (plus_constant (copy_rtx (y),
                                                -INTVAL (const_arg1)),
                                 NULL_RTX);
            }

          /* Fall through.  */

        from_plus:
        case SMIN:    case SMAX:      case UMIN:    case UMAX:
        case IOR:     case AND:       case XOR:
        case MULT:
        case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
          /* If we have (<op> <reg> <const_int>) for an associative OP and REG
             is known to be of similar form, we may be able to replace the
             operation with a combined operation.  This may eliminate the
             intermediate operation if every use is simplified in this way.
             Note that the similar optimization done by combine.c only works
             if the intermediate operation's result has only one reference.  */
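          /* For example, if REG is known to be (plus Z (const_int 4)), then
             (plus REG (const_int 5)) may be rewritten as
             (plus Z (const_int 9)); likewise two consecutive shifts of the
             same kind, such as (ashift (ashift Z 2) 3), may combine into a
             single shift by the summed count, (ashift Z 5).  */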

          if (REG_P (folded_arg0)
              && const_arg1 && CONST_INT_P (const_arg1))
            {
              int is_shift
                = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
              rtx y, inner_const, new_const;
              rtx canon_const_arg1 = const_arg1;
              enum rtx_code associate_code;

              if (is_shift
                  && (INTVAL (const_arg1) >= GET_MODE_PRECISION (mode)
                      || INTVAL (const_arg1) < 0))
                {
                  if (SHIFT_COUNT_TRUNCATED)
                    canon_const_arg1 = GEN_INT (INTVAL (const_arg1)
                                                & (GET_MODE_BITSIZE (mode)
                                                   - 1));
                  else
                    break;
                }

              y = lookup_as_function (folded_arg0, code);
              if (y == 0)
                break;

              /* If we have compiled a statement like
                 "if (x == (x & mask1))", and now are looking at
                 "x & mask2", we will have a case where the first operand
                 of Y is the same as our first operand.  Unless we detect
                 this case, an infinite loop will result.  */
              if (XEXP (y, 0) == folded_arg0)
                break;

              inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
              if (!inner_const || !CONST_INT_P (inner_const))
                break;

              /* Don't associate these operations if they are a PLUS with the
                 same constant and it is a power of two.  These might be doable
                 with a pre- or post-increment.  Similarly for two subtracts of
                 identical powers of two with post decrement.  */
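              /* For instance, (plus (plus X (const_int 4)) (const_int 4)) is
                 left alone on a target with post-increment addressing, since
                 the two separate additions may later become auto-increments.  */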

              if (code == PLUS && const_arg1 == inner_const
                  && ((HAVE_PRE_INCREMENT
                          && exact_log2 (INTVAL (const_arg1)) >= 0)
                      || (HAVE_POST_INCREMENT
                          && exact_log2 (INTVAL (const_arg1)) >= 0)
                      || (HAVE_PRE_DECREMENT
                          && exact_log2 (- INTVAL (const_arg1)) >= 0)
                      || (HAVE_POST_DECREMENT
                          && exact_log2 (- INTVAL (const_arg1)) >= 0)))
                break;

              /* ??? Vector mode shifts by scalar
                 shift operand are not supported yet.  */
              if (is_shift && VECTOR_MODE_P (mode))
                break;

              if (is_shift
                  && (INTVAL (inner_const) >= GET_MODE_PRECISION (mode)
                      || INTVAL (inner_const) < 0))
                {
                  if (SHIFT_COUNT_TRUNCATED)
                    inner_const = GEN_INT (INTVAL (inner_const)
                                           & (GET_MODE_BITSIZE (mode) - 1));
                  else
                    break;
                }

              /* Compute the code used to compose the constants.  For example,
                 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */

              associate_code = (is_shift || code == MINUS ? PLUS : code);

              new_const = simplify_binary_operation (associate_code, mode,
                                                     canon_const_arg1,
                                                     inner_const);

              if (new_const == 0)
                break;

              /* If we are associating shift operations, don't let this
                 produce a shift of the size of the object or larger.
                 This could occur when we follow a sign-extend by a right
                 shift on a machine that does a sign-extend as a pair
                 of shifts.  */
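              /* E.g. on a 32-bit machine, combining the arithmetic right
                 shift by 24 that finishes a QImode sign-extension with a
                 further right shift by 8 would give a count of 32; for
                 ASHIFTRT we can use 31 instead, which still leaves only
                 copies of the sign bit.  */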

              if (is_shift
                  && CONST_INT_P (new_const)
                  && INTVAL (new_const) >= GET_MODE_PRECISION (mode))
                {
                  /* As an exception, we can turn an ASHIFTRT of this
                     form into a shift of the number of bits - 1.  */
                  if (code == ASHIFTRT)
                    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
                  else if (!side_effects_p (XEXP (y, 0)))
                    return CONST0_RTX (mode);
                  else
                    break;
                }

              y = copy_rtx (XEXP (y, 0));

              /* If Y contains our first operand (the most common way this
                 can happen is if Y is a MEM), we would go into an infinite
                 loop if we tried to fold it.  So don't in that case.  */

              if (! reg_mentioned_p (folded_arg0, y))
                y = fold_rtx (y, insn);

              return simplify_gen_binary (code, mode, y, new_const);
            }
          break;

        case DIV:       case UDIV:
          /* ??? The associative optimization performed immediately above is
             also possible for DIV and UDIV using associate_code of MULT.
             However, we would need extra code to verify that the
             multiplication does not overflow, that is, there is no overflow
             in the calculation of new_const.  */
          break;

        default:
          break;
        }

      new_rtx = simplify_binary_operation (code, mode,
                                       const_arg0 ? const_arg0 : folded_arg0,
                                       const_arg1 ? const_arg1 : folded_arg1);
      break;

    case RTX_OBJ:
      /* (lo_sum (high X) X) is simply X.  */
      if (code == LO_SUM && const_arg0 != 0
          && GET_CODE (const_arg0) == HIGH
          && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
        return const_arg1;
      break;

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
                                        const_arg0 ? const_arg0 : folded_arg0,
                                        const_arg1 ? const_arg1 : folded_arg1,
                                        const_arg2 ? const_arg2 : XEXP (x, 2));
      break;

    default:
      break;
    }

  return new_rtx ? new_rtx : x;
}

/* Return a constant value currently equivalent to X.
   Return 0 if we don't know one.  */
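/* For example, if (reg 100) belongs to a quantity whose recorded constant is
   (const_int 5), the result is (const_int 5); for a SUBREG of such a register
   the corresponding piece of that constant may be returned instead.  */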

static rtx
equiv_constant (rtx x)
{
  if (REG_P (x)
      && REGNO_QTY_VALID_P (REGNO (x)))
    {
      int x_q = REG_QTY (REGNO (x));
      struct qty_table_elem *x_ent = &qty_table[x_q];

      if (x_ent->const_rtx)
        x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
    }

  if (x == 0 || CONSTANT_P (x))
    return x;

  if (GET_CODE (x) == SUBREG)
    {
      enum machine_mode mode = GET_MODE (x);
      enum machine_mode imode = GET_MODE (SUBREG_REG (x));
      rtx new_rtx;

      /* See if we previously assigned a constant value to this SUBREG.  */
      if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
          || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
          || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
        return new_rtx;

      /* If we didn't and if doing so makes sense, see if we previously
         assigned a constant value to the enclosing word mode SUBREG.  */
      if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode)
          && GET_MODE_SIZE (word_mode) < GET_MODE_SIZE (imode))
        {
          int byte = SUBREG_BYTE (x) - subreg_lowpart_offset (mode, word_mode);
          if (byte >= 0 && (byte % UNITS_PER_WORD) == 0)
            {
              rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte);
              new_rtx = lookup_as_function (y, CONST_INT);
              if (new_rtx)
                return gen_lowpart (mode, new_rtx);
            }
        }

      /* Otherwise see if we already have a constant for the inner REG.  */
      if (REG_P (SUBREG_REG (x))
          && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
        return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x));

      return 0;
    }

  /* If X is a MEM, see if it is a constant-pool reference, or look it up in
     the hash table in case its value was seen before.  */

  if (MEM_P (x))
    {
      struct table_elt *elt;

      x = avoid_constant_pool_reference (x);
      if (CONSTANT_P (x))
        return x;

      elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
      if (elt == 0)
        return 0;

      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
        if (elt->is_const && CONSTANT_P (elt->exp))
          return elt->exp;
    }

  return 0;
}

/* Given INSN, a jump insn, TAKEN indicates if we are following the
   "taken" branch.

   In certain cases, this can cause us to add an equivalence.  For example,
   if we are following the taken case of
        if (i == 2)
   we can add the fact that `i' and `2' are now equivalent.

   In any case, we can record that this comparison was passed.  If the same
   comparison is seen later, we will know its value.  */

static void
record_jump_equiv (rtx insn, bool taken)
{
  int cond_known_true;
  rtx op0, op1;
  rtx set;
  enum machine_mode mode, mode0, mode1;
  int reversed_nonequality = 0;
  enum rtx_code code;

  /* Ensure this is the right kind of insn.  */
  gcc_assert (any_condjump_p (insn));

  set = pc_set (insn);

  /* See if this jump condition is known true or false.  */
  if (taken)
    cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
  else
    cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);

  /* Get the type of comparison being done and the operands being compared.
     If we had to reverse a non-equality condition, record that fact so we
     know that it isn't valid for floating-point.  */
  code = GET_CODE (XEXP (SET_SRC (set), 0));
  op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
  op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);

  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
  if (! cond_known_true)
    {
      code = reversed_comparison_code_parts (code, op0, op1, insn);

      /* Don't remember if we can't find the inverse.  */
      if (code == UNKNOWN)
        return;
    }

  /* The mode is the mode of the non-constant.  */
  mode = mode0;
  if (mode1 != VOIDmode)
    mode = mode1;

  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
}

/* Yet another form of subreg creation.  In this case, we want something in
   MODE, and we should assume OP has MODE iff it is naturally modeless.  */
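/* For instance, a CONST_INT is VOIDmode ("naturally modeless") and is
   returned unchanged, while a REG whose mode differs from MODE is wrapped in
   a lowpart SUBREG, which may be NULL_RTX if no such subreg can be formed.  */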

static rtx
record_jump_cond_subreg (enum machine_mode mode, rtx op)
{
  enum machine_mode op_mode = GET_MODE (op);
  if (op_mode == mode || op_mode == VOIDmode)
    return op;
  return lowpart_subreg (mode, op, op_mode);
}

/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
   Make any useful entries we can with that information.  Called from
   above function and called recursively.  */

static void
record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
                  rtx op1, int reversed_nonequality)
{
  unsigned op0_hash, op1_hash;
  int op0_in_memory, op1_in_memory;
  struct table_elt *op0_elt, *op1_elt;

  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
     we know that they are also equal in the smaller mode (this is also
     true for all smaller modes whether or not there is a SUBREG, but
     is not worth testing for with no SUBREG).  */
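  /* For example, if (subreg:SI (reg:QI R) 0) is known equal to (reg:SI S),
     then (reg:QI R) can also be recorded as equal to the low QImode part
     of S.  */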

  /* Note that GET_MODE (op0) may not equal MODE.  */
  if (code == EQ && paradoxical_subreg_p (op0))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op0), tem,
                          reversed_nonequality);
    }

  if (code == EQ && paradoxical_subreg_p (op1))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op1), tem,
                          reversed_nonequality);
    }

  /* Similarly, if this is an NE comparison, and either is a SUBREG
     making a smaller mode, we know the whole thing is also NE.  */

  /* Note that GET_MODE (op0) may not equal MODE;
     if we test MODE instead, we can get an infinite recursion
     alternating between two modes each wider than MODE.  */

  if (code == NE && GET_CODE (op0) == SUBREG
      && subreg_lowpart_p (op0)
      && (GET_MODE_SIZE (GET_MODE (op0))
          < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op0), tem,
                          reversed_nonequality);
    }

  if (code == NE && GET_CODE (op1) == SUBREG
      && subreg_lowpart_p (op1)
      && (GET_MODE_SIZE (GET_MODE (op1))
          < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
        record_jump_cond (code, mode, SUBREG_REG (op1), tem,
                          reversed_nonequality);
    }

  /* Hash both operands.  */

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op0_hash = HASH (op0, mode);
  op0_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op1_hash = HASH (op1, mode);
  op1_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  /* Look up both operands.  */
  op0_elt = lookup (op0, op0_hash, mode);
  op1_elt = lookup (op1, op1_hash, mode);

  /* If both operands are already equivalent or if they are not in the
     table but are identical, do nothing.  */
  if ((op0_elt != 0 && op1_elt != 0
       && op0_elt->first_same_value == op1_elt->first_same_value)
      || op0 == op1 || rtx_equal_p (op0, op1))
    return;

  /* If we aren't setting two things equal all we can do is save this
     comparison.  Similarly if this is floating-point.  In the latter
     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
     If we record the equality, we might inadvertently delete code
     whose intent was to change -0 to +0.  */

  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
    {
      struct qty_table_elem *ent;
      int qty;

      /* If we reversed a floating-point comparison, if OP0 is not a
         register, or if OP1 is neither a register nor a constant, we can't
         do anything.  */

      if (!REG_P (op1))
        op1 = equiv_constant (op1);

      if ((reversed_nonequality && FLOAT_MODE_P (mode))
          || !REG_P (op0) || op1 == 0)
        return;

      /* Put OP0 in the hash table if it isn't already.  This gives it a
         new quantity number.  */
      if (op0_elt == 0)
        {
          if (insert_regs (op0, NULL, 0))
            {
              rehash_using_reg (op0);
              op0_hash = HASH (op0, mode);

              /* If OP0 is contained in OP1, this changes its hash code
                 as well.  Faster to rehash than to check, except
                 for the simple case of a constant.  */
              if (! CONSTANT_P (op1))
                op1_hash = HASH (op1, mode);
            }

          op0_elt = insert (op0, NULL, op0_hash, mode);
          op0_elt->in_memory = op0_in_memory;
        }

      qty = REG_QTY (REGNO (op0));
      ent = &qty_table[qty];

      ent->comparison_code = code;
      if (REG_P (op1))
        {
          /* Look it up again--in case op0 and op1 are the same.  */
          op1_elt = lookup (op1, op1_hash, mode);

          /* Put OP1 in the hash table so it gets a new quantity number.  */
          if (op1_elt == 0)
            {
              if (insert_regs (op1, NULL, 0))
                {
                  rehash_using_reg (op1);
                  op1_hash = HASH (op1, mode);
                }

              op1_elt = insert (op1, NULL, op1_hash, mode);
              op1_elt->in_memory = op1_in_memory;
            }

          ent->comparison_const = NULL_RTX;
          ent->comparison_qty = REG_QTY (REGNO (op1));
        }
      else
        {
          ent->comparison_const = op1;
          ent->comparison_qty = -1;
        }

      return;
    }

  /* If either side is still missing an equivalence, make it now,
     then merge the equivalences.  */

  if (op0_elt == 0)
    {
      if (insert_regs (op0, NULL, 0))
        {
          rehash_using_reg (op0);
          op0_hash = HASH (op0, mode);
        }

      op0_elt = insert (op0, NULL, op0_hash, mode);
      op0_elt->in_memory = op0_in_memory;
    }

  if (op1_elt == 0)
    {
      if (insert_regs (op1, NULL, 0))
        {
          rehash_using_reg (op1);
          op1_hash = HASH (op1, mode);
        }

      op1_elt = insert (op1, NULL, op1_hash, mode);
      op1_elt->in_memory = op1_in_memory;
    }

  merge_equiv_classes (op0_elt, op1_elt);
}

/* CSE processing for one instruction.
   First simplify sources and addresses of all assignments
   in the instruction, using previously-computed equivalent values.
   Then install the new sources and destinations in the table
   of available values.  */
 | 
      
         | 4096 |  |  |  
 | 
      
         | 4097 |  |  | /* Data on one SET contained in the instruction.  */
 | 
      
         | 4098 |  |  |  
 | 
      
         | 4099 |  |  | struct set
 | 
      
         | 4100 |  |  | {
 | 
      
         | 4101 |  |  |   /* The SET rtx itself.  */
 | 
      
         | 4102 |  |  |   rtx rtl;
 | 
      
         | 4103 |  |  |   /* The SET_SRC of the rtx (the original value, if it is changing).  */
 | 
      
         | 4104 |  |  |   rtx src;
 | 
      
         | 4105 |  |  |   /* The hash-table element for the SET_SRC of the SET.  */
 | 
      
         | 4106 |  |  |   struct table_elt *src_elt;
 | 
      
         | 4107 |  |  |   /* Hash value for the SET_SRC.  */
 | 
      
         | 4108 |  |  |   unsigned src_hash;
 | 
      
         | 4109 |  |  |   /* Hash value for the SET_DEST.  */
 | 
      
         | 4110 |  |  |   unsigned dest_hash;
 | 
      
         | 4111 |  |  |   /* The SET_DEST, with SUBREG, etc., stripped.  */
 | 
      
         | 4112 |  |  |   rtx inner_dest;
 | 
      
         | 4113 |  |  |   /* Nonzero if the SET_SRC is in memory.  */
 | 
      
         | 4114 |  |  |   char src_in_memory;
 | 
      
         | 4115 |  |  |   /* Nonzero if the SET_SRC contains something
 | 
      
         | 4116 |  |  |      whose value cannot be predicted and understood.  */
 | 
      
         | 4117 |  |  |   char src_volatile;
 | 
      
         | 4118 |  |  |   /* Original machine mode, in case it becomes a CONST_INT.
 | 
      
         | 4119 |  |  |      The size of this field should match the size of the mode
 | 
      
         | 4120 |  |  |      field of struct rtx_def (see rtl.h).  */
 | 
      
         | 4121 |  |  |   ENUM_BITFIELD(machine_mode) mode : 8;
 | 
      
         | 4122 |  |  |   /* A constant equivalent for SET_SRC, if any.  */
 | 
      
         | 4123 |  |  |   rtx src_const;
 | 
      
         | 4124 |  |  |   /* Hash value of constant equivalent for SET_SRC.  */
 | 
      
         | 4125 |  |  |   unsigned src_const_hash;
 | 
      
         | 4126 |  |  |   /* Table entry for constant equivalent for SET_SRC, if any.  */
 | 
      
         | 4127 |  |  |   struct table_elt *src_const_elt;
 | 
      
         | 4128 |  |  |   /* Table entry for the destination address.  */
 | 
      
         | 4129 |  |  |   struct table_elt *dest_addr_elt;
 | 
      
         | 4130 |  |  | };
 | 
      
         | 4131 |  |  |  
 | 
      
         | 4132 |  |  | static void
 | 
      
         | 4133 |  |  | cse_insn (rtx insn)
 | 
      
         | 4134 |  |  | {
 | 
      
         | 4135 |  |  |   rtx x = PATTERN (insn);
 | 
      
         | 4136 |  |  |   int i;
 | 
      
         | 4137 |  |  |   rtx tem;
 | 
      
         | 4138 |  |  |   int n_sets = 0;
 | 
      
         | 4139 |  |  |  
 | 
      
         | 4140 |  |  |   rtx src_eqv = 0;
 | 
      
         | 4141 |  |  |   struct table_elt *src_eqv_elt = 0;
 | 
      
         | 4142 |  |  |   int src_eqv_volatile = 0;
 | 
      
         | 4143 |  |  |   int src_eqv_in_memory = 0;
 | 
      
         | 4144 |  |  |   unsigned src_eqv_hash = 0;
 | 
      
         | 4145 |  |  |  
 | 
      
         | 4146 |  |  |   struct set *sets = (struct set *) 0;
 | 
      
         | 4147 |  |  |  
 | 
      
         | 4148 |  |  |   this_insn = insn;
 | 
      
         | 4149 |  |  | #ifdef HAVE_cc0
 | 
      
         | 4150 |  |  |   /* Records what this insn does to set CC0.  */
 | 
      
         | 4151 |  |  |   this_insn_cc0 = 0;
 | 
      
         | 4152 |  |  |   this_insn_cc0_mode = VOIDmode;
 | 
      
         | 4153 |  |  | #endif
 | 
      
         | 4154 |  |  |  
 | 
      
         | 4155 |  |  |   /* Find all the SETs and CLOBBERs in this instruction.
 | 
      
         | 4156 |  |  |      Record all the SETs in the array `set' and count them.
 | 
      
         | 4157 |  |  |      Also determine whether there is a CLOBBER that invalidates
 | 
      
         | 4158 |  |  |      all memory references, or all references at varying addresses.  */
 | 
      

  if (CALL_P (insn))
    {
      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
        {
          if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
            invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
          XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
        }
    }

  if (GET_CODE (x) == SET)
    {
      sets = XALLOCA (struct set);
      sets[0].rtl = x;

      /* Ignore SETs that are unconditional jumps.
         They never need cse processing, so this does not hurt.
         The reason is not efficiency but rather
         so that we can test at the end for instructions
         that have been simplified to unconditional jumps
         and not be misled by unchanged instructions
         that were unconditional jumps to begin with.  */
      if (SET_DEST (x) == pc_rtx
          && GET_CODE (SET_SRC (x)) == LABEL_REF)
        ;
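        /* Such a SET is simply (set (pc) (label_ref N)) in RTL, so there
           is nothing here for CSE to improve.  */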
      

      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
         The hard function value register is used only once, to copy to
         someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
         Ensure we invalidate the destination register.  On the 80386 no
         other code would invalidate it since it is a fixed_reg.
         We need not check the return of apply_change_group; see canon_reg.  */

      else if (GET_CODE (SET_SRC (x)) == CALL)
        {
          canon_reg (SET_SRC (x), insn);
          apply_change_group ();
          fold_rtx (SET_SRC (x), insn);
          invalidate (SET_DEST (x), VOIDmode);
        }
      else
        n_sets = 1;
    }
  else if (GET_CODE (x) == PARALLEL)
    {
      int lim = XVECLEN (x, 0);

      sets = XALLOCAVEC (struct set, lim);

      /* Find all regs explicitly clobbered in this insn,
         and ensure they are not replaced with any other regs
         elsewhere in this insn.
         When a reg that is clobbered is also used for input,
         we should presume that that is for a reason,
         and we should not substitute some other register
         which is not supposed to be clobbered.
         Therefore, this loop cannot be merged into the one below
         because a CALL may precede a CLOBBER and refer to the
         value clobbered.  We must not let a canonicalization do
         anything in that case.  */
      for (i = 0; i < lim; i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (GET_CODE (y) == CLOBBER)
            {
              rtx clobbered = XEXP (y, 0);

              if (REG_P (clobbered)
                  || GET_CODE (clobbered) == SUBREG)
                invalidate (clobbered, VOIDmode);
              else if (GET_CODE (clobbered) == STRICT_LOW_PART
                       || GET_CODE (clobbered) == ZERO_EXTRACT)
                invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
            }
        }

      for (i = 0; i < lim; i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (GET_CODE (y) == SET)
            {
              /* As above, we ignore unconditional jumps and call-insns and
                 ignore the result of apply_change_group.  */
              if (GET_CODE (SET_SRC (y)) == CALL)
                {
                  canon_reg (SET_SRC (y), insn);
                  apply_change_group ();
                  fold_rtx (SET_SRC (y), insn);
                  invalidate (SET_DEST (y), VOIDmode);
                }
              else if (SET_DEST (y) == pc_rtx
                       && GET_CODE (SET_SRC (y)) == LABEL_REF)
                ;
              else
                sets[n_sets++].rtl = y;
            }
          else if (GET_CODE (y) == CLOBBER)
            {
              /* If we clobber memory, canon the address.
                 This does nothing when a register is clobbered
                 because we have already invalidated the reg.  */
              if (MEM_P (XEXP (y, 0)))
                canon_reg (XEXP (y, 0), insn);
            }
          else if (GET_CODE (y) == USE
                   && ! (REG_P (XEXP (y, 0))
                         && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
            canon_reg (y, insn);
          else if (GET_CODE (y) == CALL)
            {
              /* The result of apply_change_group can be ignored; see
                 canon_reg.  */
              canon_reg (y, insn);
              apply_change_group ();
              fold_rtx (y, insn);
            }
        }
    }
  else if (GET_CODE (x) == CLOBBER)
    {
      if (MEM_P (XEXP (x, 0)))
        canon_reg (XEXP (x, 0), insn);
    }
  /* Canonicalize a USE of a pseudo register or memory location.  */
  else if (GET_CODE (x) == USE
           && ! (REG_P (XEXP (x, 0))
                 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
    canon_reg (x, insn);
  else if (GET_CODE (x) == ASM_OPERANDS)
    {
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
        {
          rtx input = ASM_OPERANDS_INPUT (x, i);
          if (!(REG_P (input) && REGNO (input) < FIRST_PSEUDO_REGISTER))
            {
              input = canon_reg (input, insn);
              validate_change (insn, &ASM_OPERANDS_INPUT (x, i), input, 1);
            }
        }
    }
  else if (GET_CODE (x) == CALL)
    {
      /* The result of apply_change_group can be ignored; see canon_reg.  */
      canon_reg (x, insn);
      apply_change_group ();
      fold_rtx (x, insn);
    }
  else if (DEBUG_INSN_P (insn))
    canon_reg (PATTERN (insn), insn);

  /* Store the equivalent value in SRC_EQV, if different, or if the DEST
     is a STRICT_LOW_PART.  The latter condition is necessary because SRC_EQV
     is handled specially for this case, and if it isn't set, then there will
     be no equivalence for the destination.  */
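  /* A REG_EQUAL note records an expression the destination is known to
     equal at this point; for example, a multiplication that was expanded
     into shifts and adds may carry a note such as
         (expr_list:REG_EQUAL (mult:SI (reg:SI 70) (const_int 10)) (nil))
     (register number and constant are only illustrative).  */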
      
  if (n_sets == 1 && REG_NOTES (insn) != 0
      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
      && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
          || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
    {
      /* The result of apply_change_group can be ignored; see canon_reg.  */
      canon_reg (XEXP (tem, 0), insn);
      apply_change_group ();
      src_eqv = fold_rtx (XEXP (tem, 0), insn);
      XEXP (tem, 0) = copy_rtx (src_eqv);
      df_notes_rescan (insn);
    }

  /* Canonicalize sources and addresses of destinations.
     We do this in a separate pass to avoid problems when a MATCH_DUP is
     present in the insn pattern.  In that case, we want to ensure that
     we don't break the duplicate nature of the pattern.  So we will replace
     both operands at the same time.  Otherwise, we would fail to find an
     equivalent substitution in the loop calling validate_change below.

     We used to suppress canonicalization of DEST if it appears in SRC,
     but we don't do this any more.  */

  for (i = 0; i < n_sets; i++)
    {
      rtx dest = SET_DEST (sets[i].rtl);
      rtx src = SET_SRC (sets[i].rtl);
      rtx new_rtx = canon_reg (src, insn);

      validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);

      if (GET_CODE (dest) == ZERO_EXTRACT)
        {
          validate_change (insn, &XEXP (dest, 1),
                           canon_reg (XEXP (dest, 1), insn), 1);
          validate_change (insn, &XEXP (dest, 2),
                           canon_reg (XEXP (dest, 2), insn), 1);
        }

      while (GET_CODE (dest) == SUBREG
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      if (MEM_P (dest))
        canon_reg (dest, insn);
    }

  /* Now that we have done all the replacements, we can apply the change
     group and see if they all work.  Note that this will cause some
     canonicalizations that would have worked individually not to be applied
     because some other canonicalization didn't work, but this should not
     occur often.

     The result of apply_change_group can be ignored; see canon_reg.  */

  apply_change_group ();

  /* Set sets[i].src_elt to the class each source belongs to.
     Detect assignments from or to volatile things
     and set sets[i] to zero so they will be ignored
     in the rest of this function.

     Nothing in this loop changes the hash table or the register chains.  */

  for (i = 0; i < n_sets; i++)
    {
      bool repeat = false;
      rtx src, dest;
      rtx src_folded;
      struct table_elt *elt = 0, *p;
      enum machine_mode mode;
      rtx src_eqv_here;
      rtx src_const = 0;
      rtx src_related = 0;
      bool src_related_is_const_anchor = false;
      struct table_elt *src_const_elt = 0;
      int src_cost = MAX_COST;
      int src_eqv_cost = MAX_COST;
      int src_folded_cost = MAX_COST;
      int src_related_cost = MAX_COST;
      int src_elt_cost = MAX_COST;
      int src_regcost = MAX_COST;
      int src_eqv_regcost = MAX_COST;
      int src_folded_regcost = MAX_COST;
      int src_related_regcost = MAX_COST;
      int src_elt_regcost = MAX_COST;
      /* Set nonzero if we need to call force_const_mem on the
         contents of src_folded before using it.  */
      int src_folded_force_flag = 0;

      dest = SET_DEST (sets[i].rtl);
      src = SET_SRC (sets[i].rtl);

      /* If SRC is a constant that has no machine mode,
         hash it with the destination's machine mode.
         This way we can keep different modes separate.  */

      mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
      sets[i].mode = mode;

      if (src_eqv)
        {
          enum machine_mode eqvmode = mode;
          if (GET_CODE (dest) == STRICT_LOW_PART)
            eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
          do_not_record = 0;
          hash_arg_in_memory = 0;
          src_eqv_hash = HASH (src_eqv, eqvmode);

          /* Find the equivalence class for the equivalent expression.  */

          if (!do_not_record)
            src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);

          src_eqv_volatile = do_not_record;
          src_eqv_in_memory = hash_arg_in_memory;
        }

      /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
         value of the INNER register, not the destination.  So it is not
         a valid substitution for the source.  But save it for later.  */
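      /* Such a destination has the form
             (strict_low_part (subreg:QI (reg:SI 100) 0))
         (modes and register number are only illustrative): only the low
         part of the inner register is stored, so SRC_EQV describes the
         whole inner register rather than just the value being stored.  */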
      
      if (GET_CODE (dest) == STRICT_LOW_PART)
        src_eqv_here = 0;
      else
        src_eqv_here = src_eqv;

      /* Simplify any foldable subexpressions in SRC.  Then get the fully-
         simplified result, which may not necessarily be valid.  */
      src_folded = fold_rtx (src, insn);

#if 0
      /* ??? This caused bad code to be generated for the m68k port with -O2.
         Suppose src is (CONST_INT -1), and that after truncation src_folded
         is (CONST_INT 3).  Suppose src_folded is then used for src_const.
         At the end we will add src and src_const to the same equivalence
         class.  We now have 3 and -1 on the same equivalence class.  This
         causes later instructions to be mis-optimized.  */
      /* If storing a constant in a bitfield, pre-truncate the constant
         so we will be able to record it later.  */
      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
        {
          rtx width = XEXP (SET_DEST (sets[i].rtl), 1);

          if (CONST_INT_P (src)
              && CONST_INT_P (width)
              && INTVAL (width) < HOST_BITS_PER_WIDE_INT
              && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
            src_folded
              = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
                                          << INTVAL (width)) - 1));
        }
#endif

      /* Compute SRC's hash code, and also notice if it
         should not be recorded at all.  In that case,
         prevent any further processing of this assignment.  */
      do_not_record = 0;
      hash_arg_in_memory = 0;

      sets[i].src = src;
      sets[i].src_hash = HASH (src, mode);
      sets[i].src_volatile = do_not_record;
      sets[i].src_in_memory = hash_arg_in_memory;

      /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
         a pseudo, do not record SRC.  Using SRC as a replacement for
         anything else will be incorrect in that situation.  Note that
         this usually occurs only for stack slots, in which case all the
         RTL would be referring to SRC, so we don't lose any optimization
         opportunities by not having SRC in the hash table.  */

      if (MEM_P (src)
          && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
          && REG_P (dest)
          && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
        sets[i].src_volatile = 1;

#if 0
      /* It is no longer clear why we used to do this, but it doesn't
         appear to still be needed.  So let's try without it since this
         code hurts cse'ing widened ops.  */
      /* If source is a paradoxical subreg (such as QI treated as an SI),
         treat it as volatile.  It may do the work of an SI in one context
         where the extra bits are not being used, but cannot replace an SI
         in general.  */
      if (paradoxical_subreg_p (src))
        sets[i].src_volatile = 1;
#endif

      /* Locate all possible equivalent forms for SRC.  Try to replace
         SRC in the insn with each cheaper equivalent.

         We have the following types of equivalents: SRC itself, a folded
         version, a value given in a REG_EQUAL note, or a value related
         to a constant.

         Each of these equivalents may be part of an additional class
         of equivalents (if more than one is in the table, they must be in
         the same class; we check for this).

         If the source is volatile, we don't do any table lookups.

         We note any constant equivalent for possible later use in a
         REG_NOTE.  */

      if (!sets[i].src_volatile)
        elt = lookup (src, sets[i].src_hash, mode);

      sets[i].src_elt = elt;

      if (elt && src_eqv_here && src_eqv_elt)
        {
          if (elt->first_same_value != src_eqv_elt->first_same_value)
            {
              /* The REG_EQUAL is indicating that two formerly distinct
                 classes are now equivalent.  So merge them.  */
              merge_equiv_classes (elt, src_eqv_elt);
              src_eqv_hash = HASH (src_eqv, elt->mode);
              src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
            }

          src_eqv_here = 0;
        }

      else if (src_eqv_elt)
        elt = src_eqv_elt;

      /* Try to find a constant somewhere and record it in `src_const'.
         Record its table element, if any, in `src_const_elt'.  Look in
         any known equivalences first.  (If the constant is not in the
         table, also set `sets[i].src_const_hash').  */
      if (elt)
        for (p = elt->first_same_value; p; p = p->next_same_value)
          if (p->is_const)
            {
              src_const = p->exp;
              src_const_elt = elt;
              break;
            }

      if (src_const == 0
          && (CONSTANT_P (src_folded)
              /* Consider (minus (label_ref L1) (label_ref L2)) as
                 "constant" here so we will record it.  This allows us
                 to fold switch statements when an ADDR_DIFF_VEC is used.  */
              || (GET_CODE (src_folded) == MINUS
                  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
                  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
        src_const = src_folded, src_const_elt = elt;
      else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
        src_const = src_eqv_here, src_const_elt = src_eqv_elt;

      /* If we don't know if the constant is in the table, get its
         hash code and look it up.  */
      if (src_const && src_const_elt == 0)
        {
          sets[i].src_const_hash = HASH (src_const, mode);
          src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
        }

      sets[i].src_const = src_const;
      sets[i].src_const_elt = src_const_elt;

      /* If the constant and our source are both in the table, mark them as
         equivalent.  Otherwise, if a constant is in the table but the source
         isn't, set ELT to it.  */
      if (src_const_elt && elt
          && src_const_elt->first_same_value != elt->first_same_value)
        merge_equiv_classes (elt, src_const_elt);
      else if (src_const_elt && elt == 0)
        elt = src_const_elt;

      /* See if there is a register linearly related to a constant
         equivalent of SRC.  */
      if (src_const
          && (GET_CODE (src_const) == CONST
              || (src_const_elt && src_const_elt->related_value != 0)))
        {
          src_related = use_related_value (src_const, src_const_elt);
          if (src_related)
            {
              struct table_elt *src_related_elt
                = lookup (src_related, HASH (src_related, mode), mode);
              if (src_related_elt && elt)
                {
                  if (elt->first_same_value
                      != src_related_elt->first_same_value)
                    /* This can occur when we previously saw a CONST
                       involving a SYMBOL_REF and then see the SYMBOL_REF
                       twice.  Merge the involved classes.  */
                    merge_equiv_classes (elt, src_related_elt);

                  src_related = 0;
                  src_related_elt = 0;
                }
              else if (src_related_elt && elt == 0)
                elt = src_related_elt;
            }
        }

      /* See if we have a CONST_INT that is already in a register in a
         wider mode.  */
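      /* For example, if (reg:SI 100) is already known to hold (const_int 7)
         and this insn needs (const_int 7) in HImode, the low part
         (subreg:HI (reg:SI 100) 0) can be used instead of loading the
         constant again (register number and modes are only illustrative;
         gen_lowpart below builds the actual low-part rtx).  */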
      

      if (src_const && src_related == 0 && CONST_INT_P (src_const)
          && GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_PRECISION (mode) < BITS_PER_WORD)
        {
          enum machine_mode wider_mode;

          for (wider_mode = GET_MODE_WIDER_MODE (mode);
               wider_mode != VOIDmode
               && GET_MODE_PRECISION (wider_mode) <= BITS_PER_WORD
               && src_related == 0;
               wider_mode = GET_MODE_WIDER_MODE (wider_mode))
            {
              struct table_elt *const_elt
                = lookup (src_const, HASH (src_const, wider_mode), wider_mode);

              if (const_elt == 0)
                continue;

              for (const_elt = const_elt->first_same_value;
                   const_elt; const_elt = const_elt->next_same_value)
                if (REG_P (const_elt->exp))
                  {
                    src_related = gen_lowpart (mode, const_elt->exp);
                    break;
                  }
            }
        }

      /* Another possibility is that we have an AND with a constant in
         a mode narrower than a word.  If so, it might have been generated
         as part of an "if" which would narrow the AND.  If we already
         have done the AND in a wider mode, we can use a SUBREG of that
         value.  */
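      /* For example, if SRC is (and:QI (reg:QI 101) (const_int 15)) and
         some register is already known to hold
         (and:SI (subreg:SI (reg:QI 101) 0) (const_int 15)), the low part
         of that register can serve as SRC_RELATED (register numbers,
         modes and mask value are only illustrative).  */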
      

      if (flag_expensive_optimizations && ! src_related
          && GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1))
          && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
        {
          enum machine_mode tmode;
          rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));

          for (tmode = GET_MODE_WIDER_MODE (mode);
               GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
               tmode = GET_MODE_WIDER_MODE (tmode))
            {
              rtx inner = gen_lowpart (tmode, XEXP (src, 0));
              struct table_elt *larger_elt;

              if (inner)
                {
                  PUT_MODE (new_and, tmode);
                  XEXP (new_and, 0) = inner;
                  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
                  if (larger_elt == 0)
                    continue;

                  for (larger_elt = larger_elt->first_same_value;
                       larger_elt; larger_elt = larger_elt->next_same_value)
                    if (REG_P (larger_elt->exp))
                      {
                        src_related
                          = gen_lowpart (mode, larger_elt->exp);
                        break;
                      }

                  if (src_related)
                    break;
                }
            }
        }

#ifdef LOAD_EXTEND_OP
      /* See if a MEM has already been loaded with a widening operation;
         if it has, we can use a subreg of that.  Many CISC machines
         also have such operations, but this is only likely to be
         beneficial on these machines.  */
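      /* For instance, on a target whose LOAD_EXTEND_OP for QImode is
         ZERO_EXTEND, an earlier insn may already have computed
         (zero_extend:SI (mem:QI ...)) into a register; the low part of
         that register then has the value of the QImode MEM itself.  The
         code below builds such an extension rtx in MEMORY_EXTEND_BUF and
         looks it up in each wider mode.  */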
      

      if (flag_expensive_optimizations && src_related == 0
          && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
          && GET_MODE_CLASS (mode) == MODE_INT
          && MEM_P (src) && ! do_not_record
          && LOAD_EXTEND_OP (mode) != UNKNOWN)
        {
          struct rtx_def memory_extend_buf;
          rtx memory_extend_rtx = &memory_extend_buf;
          enum machine_mode tmode;

          /* Set what we are trying to extend and the operation it might
             have been extended with.  */
          memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
          PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
          XEXP (memory_extend_rtx, 0) = src;

          for (tmode = GET_MODE_WIDER_MODE (mode);
               GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
               tmode = GET_MODE_WIDER_MODE (tmode))
            {
              struct table_elt *larger_elt;

              PUT_MODE (memory_extend_rtx, tmode);
              larger_elt = lookup (memory_extend_rtx,
                                   HASH (memory_extend_rtx, tmode), tmode);
              if (larger_elt == 0)
                continue;

              for (larger_elt = larger_elt->first_same_value;
                   larger_elt; larger_elt = larger_elt->next_same_value)
                if (REG_P (larger_elt->exp))
                  {
                    src_related = gen_lowpart (mode, larger_elt->exp);
                    break;
                  }

              if (src_related)
                break;
            }
        }
#endif /* LOAD_EXTEND_OP */

      /* Try to express the constant using a register+offset expression
         derived from a constant anchor.  */
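      /* A constant anchor is a nearby constant that some register is
         already known to hold; try_const_anchors may then return, say,
         (plus:SI (reg:SI 103) (const_int 8)) when that register holds a
         constant 8 below the one we need (register number, mode and
         offset are only illustrative; the anchor spacing is given by
         targetm.const_anchor).  */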
      

      if (targetm.const_anchor
          && !src_related
          && src_const
          && GET_CODE (src_const) == CONST_INT)
        {
          src_related = try_const_anchors (src_const, mode);
          src_related_is_const_anchor = src_related != NULL_RTX;
        }


      if (src == src_folded)
        src_folded = 0;

      /* At this point, ELT, if nonzero, points to a class of expressions
         equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
         and SRC_RELATED, if nonzero, each contain additional equivalent
         expressions.  Prune these latter expressions by deleting expressions
         already in the equivalence class.

         Check for an equivalent identical to the destination.  If found,
         this is the preferred equivalent since it will likely lead to
         elimination of the insn.  Indicate this by placing it in
         `src_related'.  */

      if (elt)
        elt = elt->first_same_value;
      for (p = elt; p; p = p->next_same_value)
        {
          enum rtx_code code = GET_CODE (p->exp);

          /* If the expression is not valid, ignore it.  Then we do not
             have to check for validity below.  In most cases, we can use
             `rtx_equal_p', since canonicalization has already been done.  */
          if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
            continue;

          /* Also skip paradoxical subregs, unless that's what we're
             looking for.  */
          if (paradoxical_subreg_p (p->exp)
              && ! (src != 0
                    && GET_CODE (src) == SUBREG
                    && GET_MODE (src) == GET_MODE (p->exp)
                    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
                        < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
            continue;

          if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
            src = 0;
          else if (src_folded && GET_CODE (src_folded) == code
                   && rtx_equal_p (src_folded, p->exp))
            src_folded = 0;
          else if (src_eqv_here && GET_CODE (src_eqv_here) == code
                   && rtx_equal_p (src_eqv_here, p->exp))
            src_eqv_here = 0;
          else if (src_related && GET_CODE (src_related) == code
                   && rtx_equal_p (src_related, p->exp))
            src_related = 0;

          /* If this is the same as the destination of the insn, we want
             to prefer it.  Copy it to src_related.  The code below will
             then give it a negative cost.  */
          if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
            src_related = dest;
        }

      /* Find the cheapest valid equivalent, trying all the available
         possibilities.  Prefer items not in the hash table to ones
         that are when they are equal cost.  Note that we can never
         worsen an insn as the current contents will also succeed.
         If we find an equivalent identical to the destination, use it as best,
         since this insn will probably be eliminated in that case.  */
      if (src)
        {
          if (rtx_equal_p (src, dest))
            src_cost = src_regcost = -1;
          else
            {
              src_cost = COST (src);
              src_regcost = approx_reg_cost (src);
            }
        }

      if (src_eqv_here)
        {
          if (rtx_equal_p (src_eqv_here, dest))
            src_eqv_cost = src_eqv_regcost = -1;
          else
            {
              src_eqv_cost = COST (src_eqv_here);
              src_eqv_regcost = approx_reg_cost (src_eqv_here);
            }
        }

      if (src_folded)
        {
          if (rtx_equal_p (src_folded, dest))
            src_folded_cost = src_folded_regcost = -1;
          else
            {
              src_folded_cost = COST (src_folded);
              src_folded_regcost = approx_reg_cost (src_folded);
            }
        }

      if (src_related)
        {
          if (rtx_equal_p (src_related, dest))
            src_related_cost = src_related_regcost = -1;
          else
            {
              src_related_cost = COST (src_related);
              src_related_regcost = approx_reg_cost (src_related);

              /* If a const-anchor is used to synthesize a constant that
                 normally requires multiple instructions then slightly prefer
                 it over the original sequence.  These instructions are likely
                 to become redundant now.  We can't compare against the cost
                 of src_eqv_here because, on MIPS for example, multi-insn
                 constants have zero cost; they are assumed to be hoisted from
                 loops.  */
              if (src_related_is_const_anchor
                  && src_related_cost == src_cost
                  && src_eqv_here)
                src_related_cost--;
            }
        }

      /* If this was an indirect jump insn, a known label will really be
         cheaper even though it looks more expensive.  */
      if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
        src_folded = src_const, src_folded_cost = src_folded_regcost = -1;

      /* Terminate loop when replacement made.  This must terminate since
         the current contents will be tested and will always be valid.  */
      while (1)
        {
          rtx trial;

          /* Skip invalid entries.  */
          while (elt && !REG_P (elt->exp)
                 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
            elt = elt->next_same_value;

          /* A paradoxical subreg would be bad here: it'll be the right
             size, but later may be adjusted so that the upper bits aren't
             what we want.  So reject it.  */
          if (elt != 0
              && paradoxical_subreg_p (elt->exp)
              /* It is okay, though, if the rtx we're trying to match
                 will ignore any of the bits we can't predict.  */
              && ! (src != 0
                    && GET_CODE (src) == SUBREG
                    && GET_MODE (src) == GET_MODE (elt->exp)
                    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
                        < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
            {
              elt = elt->next_same_value;
              continue;
            }

          if (elt)
            {
              src_elt_cost = elt->cost;
              src_elt_regcost = elt->regcost;
            }

          /* Find the cheapest one and skip it for the next time.  For items
             of equal cost, use this order:
             src_folded, src, src_eqv, src_related and hash table entry.  */
         | 4909 |  |  |           if (src_folded
 | 
      
         | 4910 |  |  |               && preferable (src_folded_cost, src_folded_regcost,
 | 
      
         | 4911 |  |  |                              src_cost, src_regcost) <= 0
 | 
      
         | 4912 |  |  |               && preferable (src_folded_cost, src_folded_regcost,
 | 
      
         | 4913 |  |  |                              src_eqv_cost, src_eqv_regcost) <= 0
 | 
      
         | 4914 |  |  |               && preferable (src_folded_cost, src_folded_regcost,
 | 
      
         | 4915 |  |  |                              src_related_cost, src_related_regcost) <= 0
 | 
      
         | 4916 |  |  |               && preferable (src_folded_cost, src_folded_regcost,
 | 
      
         | 4917 |  |  |                              src_elt_cost, src_elt_regcost) <= 0)
 | 
      
         | 4918 |  |  |             {
 | 
      
         | 4919 |  |  |               trial = src_folded, src_folded_cost = MAX_COST;
 | 
      
         | 4920 |  |  |               if (src_folded_force_flag)
 | 
      
         | 4921 |  |  |                 {
 | 
      
         | 4922 |  |  |                   rtx forced = force_const_mem (mode, trial);
 | 
      
         | 4923 |  |  |                   if (forced)
 | 
      
         | 4924 |  |  |                     trial = forced;
 | 
      
         | 4925 |  |  |                 }
 | 
      
         | 4926 |  |  |             }
 | 
      
         | 4927 |  |  |           else if (src
 | 
      
         | 4928 |  |  |                    && preferable (src_cost, src_regcost,
 | 
      
         | 4929 |  |  |                                   src_eqv_cost, src_eqv_regcost) <= 0
 | 
      
         | 4930 |  |  |                    && preferable (src_cost, src_regcost,
 | 
      
         | 4931 |  |  |                                   src_related_cost, src_related_regcost) <= 0
 | 
      
         | 4932 |  |  |                    && preferable (src_cost, src_regcost,
 | 
      
         | 4933 |  |  |                                   src_elt_cost, src_elt_regcost) <= 0)
 | 
      
         | 4934 |  |  |             trial = src, src_cost = MAX_COST;
 | 
      
         | 4935 |  |  |           else if (src_eqv_here
 | 
      
         | 4936 |  |  |                    && preferable (src_eqv_cost, src_eqv_regcost,
 | 
      
         | 4937 |  |  |                                   src_related_cost, src_related_regcost) <= 0
 | 
      
         | 4938 |  |  |                    && preferable (src_eqv_cost, src_eqv_regcost,
 | 
      
         | 4939 |  |  |                                   src_elt_cost, src_elt_regcost) <= 0)
 | 
      
         | 4940 |  |  |             trial = src_eqv_here, src_eqv_cost = MAX_COST;
 | 
      
         | 4941 |  |  |           else if (src_related
 | 
      
         | 4942 |  |  |                    && preferable (src_related_cost, src_related_regcost,
 | 
      
         | 4943 |  |  |                                   src_elt_cost, src_elt_regcost) <= 0)
 | 
      
         | 4944 |  |  |             trial = src_related, src_related_cost = MAX_COST;
 | 
      
         | 4945 |  |  |           else
 | 
      
         | 4946 |  |  |             {
 | 
      
         | 4947 |  |  |               trial = elt->exp;
 | 
      
         | 4948 |  |  |               elt = elt->next_same_value;
 | 
      
         | 4949 |  |  |               src_elt_cost = MAX_COST;
 | 
      
         | 4950 |  |  |             }
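
          /* For illustration (hypothetical costs): if src_folded and src
             both come back as (cost 4, regcost 0), every
             "preferable (...) <= 0" test above succeeds for src_folded,
             so exact ties are broken in the documented order and
             src_folded is tried first.  The hash-table entries in ELT are
             only reached once every explicit candidate has lost or has
             already been consumed (its cost reset to MAX_COST).  */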

          /* Avoid creation of overlapping memory moves.  */
          if (MEM_P (trial) && MEM_P (SET_DEST (sets[i].rtl)))
            {
              rtx src, dest;

              /* BLKmode moves are not handled by cse anyway.  */
              if (GET_MODE (trial) == BLKmode)
                break;

              src = canon_rtx (trial);
              dest = canon_rtx (SET_DEST (sets[i].rtl));

              if (!MEM_P (src) || !MEM_P (dest)
                  || !nonoverlapping_memrefs_p (src, dest, false))
                break;
            }

          /* Try to optimize
             (set (reg:M N) (const_int A))
             (set (reg:M2 O) (const_int B))
             (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))
                  (reg:M2 O)).  */
          if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
              && CONST_INT_P (trial)
              && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1))
              && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2))
              && REG_P (XEXP (SET_DEST (sets[i].rtl), 0))
              && (GET_MODE_PRECISION (GET_MODE (SET_DEST (sets[i].rtl)))
                  >= INTVAL (XEXP (SET_DEST (sets[i].rtl), 1)))
              && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))
                  + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2))
                  <= HOST_BITS_PER_WIDE_INT))
            {
              rtx dest_reg = XEXP (SET_DEST (sets[i].rtl), 0);
              rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
              rtx pos = XEXP (SET_DEST (sets[i].rtl), 2);
              unsigned int dest_hash = HASH (dest_reg, GET_MODE (dest_reg));
              struct table_elt *dest_elt
                = lookup (dest_reg, dest_hash, GET_MODE (dest_reg));
              rtx dest_cst = NULL;

              if (dest_elt)
                for (p = dest_elt->first_same_value; p; p = p->next_same_value)
                  if (p->is_const && CONST_INT_P (p->exp))
                    {
                      dest_cst = p->exp;
                      break;
                    }
              if (dest_cst)
                {
                  HOST_WIDE_INT val = INTVAL (dest_cst);
                  HOST_WIDE_INT mask;
                  unsigned int shift;
                  if (BITS_BIG_ENDIAN)
                    shift = GET_MODE_PRECISION (GET_MODE (dest_reg))
                            - INTVAL (pos) - INTVAL (width);
                  else
                    shift = INTVAL (pos);
                  if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
                    mask = ~(HOST_WIDE_INT) 0;
                  else
                    mask = ((HOST_WIDE_INT) 1 << INTVAL (width)) - 1;
                  val &= ~(mask << shift);
                  val |= (INTVAL (trial) & mask) << shift;
                  val = trunc_int_for_mode (val, GET_MODE (dest_reg));
                  validate_unshare_change (insn, &SET_DEST (sets[i].rtl),
                                           dest_reg, 1);
                  validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
                                           GEN_INT (val), 1);
                  if (apply_change_group ())
                    {
                      rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
                      if (note)
                        {
                          remove_note (insn, note);
                          df_notes_rescan (insn);
                        }
                      src_eqv = NULL_RTX;
                      src_eqv_elt = NULL;
                      src_eqv_volatile = 0;
                      src_eqv_in_memory = 0;
                      src_eqv_hash = 0;
                      repeat = true;
                      break;
                    }
                }
            }
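
          /* Worked example of the splice above (illustrative values only):
             with !BITS_BIG_ENDIAN, dest_cst == 0x1234, width == 8, pos == 4
             and trial == (const_int 0xab), we get shift == 4 and
             mask == 0xff, so
                 val = (0x1234 & ~0x0ff0) | ((0xab & 0xff) << 4) = 0x1ab4
             and the whole insn is rewritten as
                 (set (reg:M N) (const_int 0x1ab4)),
             which is why the REG_EQUAL note and the src_eqv state are
             discarded and the set is reprocessed from scratch.  */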

          /* We don't normally have an insn matching (set (pc) (pc)), so
             check for this separately here.  We will delete such an
             insn below.

             For other cases such as a table jump or conditional jump
             where we know the ultimate target, go ahead and replace the
             operand.  While that may not make a valid insn, we will
             reemit the jump below (and also insert any necessary
             barriers).  */
          if (n_sets == 1 && dest == pc_rtx
              && (trial == pc_rtx
                  || (GET_CODE (trial) == LABEL_REF
                      && ! condjump_p (insn))))
            {
              /* Don't substitute non-local labels, this confuses CFG.  */
              if (GET_CODE (trial) == LABEL_REF
                  && LABEL_REF_NONLOCAL_P (trial))
                continue;

              SET_SRC (sets[i].rtl) = trial;
              cse_jumps_altered = true;
              break;
            }

          /* Reject certain invalid forms of CONST that we create.  */
          else if (CONSTANT_P (trial)
                   && GET_CODE (trial) == CONST
                   /* Reject cases that will cause decode_rtx_const to
                      die.  On the alpha when simplifying a switch, we
                      get (const (truncate (minus (label_ref)
                      (label_ref)))).  */
                   && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
                       /* Likewise on IA-64, except without the
                          truncate.  */
                       || (GET_CODE (XEXP (trial, 0)) == MINUS
                           && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
                           && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
            /* Do nothing for this case.  */
            ;

          /* Look for a substitution that makes a valid insn.  */
          else if (validate_unshare_change
                     (insn, &SET_SRC (sets[i].rtl), trial, 0))
            {
              rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn);

              /* The result of apply_change_group can be ignored; see
                 canon_reg.  */

              validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
              apply_change_group ();

              break;
            }

          /* If we previously found constant pool entries for
             constants and this is a constant, try making a
             pool entry.  Put it in src_folded unless we already have done
             this since that is where it likely came from.  */

          else if (constant_pool_entries_cost
                   && CONSTANT_P (trial)
                   && (src_folded == 0
                       || (!MEM_P (src_folded)
                           && ! src_folded_force_flag))
                   && GET_MODE_CLASS (mode) != MODE_CC
                   && mode != VOIDmode)
            {
              src_folded_force_flag = 1;
              src_folded = trial;
              src_folded_cost = constant_pool_entries_cost;
              src_folded_regcost = constant_pool_entries_regcost;
            }
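
          /* Illustrative scenario (assumed, not tied to a specific
             target): if TRIAL is e.g. (const_double:SF 1.25) and the insn
             will not accept it as an immediate, the branch above only
             marks it by setting src_folded_force_flag and feeding it back
             in as src_folded; on the next pass through this loop the
             src_folded case calls force_const_mem, so the candidate
             actually tried is a constant-pool reference such as
             (mem/u (symbol_ref ...)) rather than the bare constant.  */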
        }

      /* If we changed the insn too much, handle this set from scratch.  */
      if (repeat)
        {
          i--;
          continue;
        }

      src = SET_SRC (sets[i].rtl);

      /* In general, it is good to have a SET with SET_SRC == SET_DEST.
         However, there is an important exception:  If both are registers
         that are not the head of their equivalence class, replace SET_SRC
         with the head of the class.  If we do not do this, we will have
         both registers live over a portion of the basic block.  This way,
         their lifetimes will likely abut instead of overlapping.  */
      if (REG_P (dest)
          && REGNO_QTY_VALID_P (REGNO (dest)))
        {
          int dest_q = REG_QTY (REGNO (dest));
          struct qty_table_elem *dest_ent = &qty_table[dest_q];

          if (dest_ent->mode == GET_MODE (dest)
              && dest_ent->first_reg != REGNO (dest)
              && REG_P (src) && REGNO (src) == REGNO (dest)
              /* Don't do this if the original insn had a hard reg as
                 SET_SRC or SET_DEST.  */
              && (!REG_P (sets[i].src)
                  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
              && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
            /* We can't call canon_reg here because it won't do anything if
               SRC is a hard register.  */
            {
              int src_q = REG_QTY (REGNO (src));
              struct qty_table_elem *src_ent = &qty_table[src_q];
              int first = src_ent->first_reg;
              rtx new_src
                = (first >= FIRST_PSEUDO_REGISTER
                   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));

              /* We must use validate-change even for this, because this
                 might be a special no-op instruction, suitable only to
                 tag notes onto.  */
              if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
                {
                  src = new_src;
                  /* If we had a constant that is cheaper than what we are now
                     setting SRC to, use that constant.  We ignored it when we
                     thought we could make this into a no-op.  */
                  if (src_const && COST (src_const) < COST (src)
                      && validate_change (insn, &SET_SRC (sets[i].rtl),
                                          src_const, 0))
                    src = src_const;
                }
            }
        }
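
      /* Illustrative case (register numbers are made up): after the
         substitutions above the insn may have become, in effect,
         (set (reg 105) (reg 105)) while an older pseudo, say (reg 103),
         still heads the same quantity.  Rewriting the source as (reg 103)
         keeps the copy around but lets the lifetimes of 103 and 105 abut
         rather than overlap.  */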

      /* If we made a change, recompute SRC values.  */
      if (src != sets[i].src)
        {
          do_not_record = 0;
          hash_arg_in_memory = 0;
          sets[i].src = src;
          sets[i].src_hash = HASH (src, mode);
          sets[i].src_volatile = do_not_record;
          sets[i].src_in_memory = hash_arg_in_memory;
          sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
        }

      /* If this is a single SET, we are setting a register, and we have an
         equivalent constant, we want to add a REG_NOTE.  We don't want
         to write a REG_EQUAL note for a constant pseudo since verifying that
         that pseudo hasn't been eliminated is a pain.  Such a note also
         won't help anything.

         Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
         which can be created for a reference to a compile time computable
         entry in a jump table.  */

      if (n_sets == 1 && src_const && REG_P (dest)
          && !REG_P (src_const)
          && ! (GET_CODE (src_const) == CONST
                && GET_CODE (XEXP (src_const, 0)) == MINUS
                && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
                && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
        {
          /* We only want a REG_EQUAL note if src_const != src.  */
          if (! rtx_equal_p (src, src_const))
            {
              /* Make sure that the rtx is not shared.  */
              src_const = copy_rtx (src_const);

              /* Record the actual constant value in a REG_EQUAL note,
                 making a new one if one does not already exist.  */
              set_unique_reg_note (insn, REG_EQUAL, src_const);
              df_notes_rescan (insn);
            }
        }
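
      /* For instance (illustrative RTL): if the insn is
         (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4))) and
         (reg:SI 101) is known to equal (const_int 12), SRC_CONST is
         (const_int 16) and the note recorded here is
         (expr_list:REG_EQUAL (const_int 16) ...), which later passes can
         use without rediscovering the equivalence.  */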

      /* Now deal with the destination.  */
      do_not_record = 0;

      /* Look within any ZERO_EXTRACT to the MEM or REG within it.  */
      while (GET_CODE (dest) == SUBREG
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      sets[i].inner_dest = dest;

      if (MEM_P (dest))
        {
#ifdef PUSH_ROUNDING
          /* Stack pushes invalidate the stack pointer.  */
          rtx addr = XEXP (dest, 0);
          if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
              && XEXP (addr, 0) == stack_pointer_rtx)
            invalidate (stack_pointer_rtx, VOIDmode);
#endif
          dest = fold_rtx (dest, insn);
        }

      /* Compute the hash code of the destination now,
         before the effects of this instruction are recorded,
         since the register values used in the address computation
         are those before this instruction.  */
      sets[i].dest_hash = HASH (dest, mode);

      /* Don't enter a bit-field in the hash table
         because the value in it after the store
         may not equal what was stored, due to truncation.  */

      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
        {
          rtx width = XEXP (SET_DEST (sets[i].rtl), 1);

          if (src_const != 0 && CONST_INT_P (src_const)
              && CONST_INT_P (width)
              && INTVAL (width) < HOST_BITS_PER_WIDE_INT
              && ! (INTVAL (src_const)
                    & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
            /* Exception: if the value is constant,
               and it won't be truncated, record it.  */
            ;
          else
            {
              /* This is chosen so that the destination will be invalidated
                 but no new value will be recorded.
                 We must invalidate because sometimes constant
                 values can be recorded for bitfields.  */
              sets[i].src_elt = 0;
              sets[i].src_volatile = 1;
              src_eqv = 0;
              src_eqv_elt = 0;
            }
        }
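
      /* Example of the truncation hazard (values are illustrative): for a
         ZERO_EXTRACT destination of width 8, storing (const_int 0x1ff)
         leaves 0xff in the field, so recording "dest == 0x1ff" would be
         wrong and the destination is merely invalidated; storing
         (const_int 0x7f) passes the mask test above and is safe to
         record.  */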

      /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
         the insn.  */
      else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
        {
          /* One less use of the label this insn used to jump to.  */
          delete_insn_and_edges (insn);
          cse_jumps_altered = true;
          /* No more processing for this set.  */
          sets[i].rtl = 0;
        }

      /* If this SET is now setting PC to a label, we know it used to
         be a conditional or computed branch.  */
      else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
               && !LABEL_REF_NONLOCAL_P (src))
        {
          /* We reemit the jump in as many cases as possible just in
             case the form of an unconditional jump is significantly
             different from a computed jump or conditional jump.

             If this insn has multiple sets, then reemitting the
             jump is nontrivial.  So instead we just force rerecognition
             and hope for the best.  */
          if (n_sets == 1)
            {
              rtx new_rtx, note;

              new_rtx = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
              JUMP_LABEL (new_rtx) = XEXP (src, 0);
              LABEL_NUSES (XEXP (src, 0))++;

              /* Make sure to copy over REG_NON_LOCAL_GOTO.  */
              note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
              if (note)
                {
                  XEXP (note, 1) = NULL_RTX;
                  REG_NOTES (new_rtx) = note;
                }

              delete_insn_and_edges (insn);
              insn = new_rtx;
            }
          else
            INSN_CODE (insn) = -1;

          /* Do not bother deleting any unreachable code, let jump do it.  */
          cse_jumps_altered = true;
          sets[i].rtl = 0;
        }
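
      /* A typical instance (illustrative): a conditional branch such as
         (set (pc) (if_then_else (eq (reg 100) (const_int 0))
                                 (label_ref 23) (pc)))
         whose condition CSE has proved always true becomes
         (set (pc) (label_ref 23)); the code above then re-emits it as a
         plain unconditional jump so it matches the backend's jump pattern,
         and leaves any newly unreachable code for the jump pass to
         remove.  */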

      /* If destination is volatile, invalidate it and then do no further
         processing for this assignment.  */

      else if (do_not_record)
        {
          if (REG_P (dest) || GET_CODE (dest) == SUBREG)
            invalidate (dest, VOIDmode);
          else if (MEM_P (dest))
            invalidate (dest, VOIDmode);
          else if (GET_CODE (dest) == STRICT_LOW_PART
                   || GET_CODE (dest) == ZERO_EXTRACT)
            invalidate (XEXP (dest, 0), GET_MODE (dest));
          sets[i].rtl = 0;
        }

      if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
        sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);

#ifdef HAVE_cc0
      /* If setting CC0, record what it was set to, or a constant, if it
         is equivalent to a constant.  If it is being set to a floating-point
         value, make a COMPARE with the appropriate constant of 0.  If we
         don't do this, later code can interpret this as a test against
         const0_rtx, which can cause problems if we try to put it into an
         insn as a floating-point operand.  */
      if (dest == cc0_rtx)
        {
          this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
          this_insn_cc0_mode = mode;
          if (FLOAT_MODE_P (mode))
            this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
                                             CONST0_RTX (mode));
        }
#endif
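
      /* Sketch of the FP case (illustrative): for (set (cc0) (reg:SF 100))
         the value remembered is not the bare register but
         (compare (reg:SF 100) (const_double:SF 0.0)), so a later user of
         cc0 sees an explicit floating-point comparison with zero rather
         than something that could be mistaken for a test against
         const0_rtx.  */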
    }

  /* Now enter all non-volatile source expressions in the hash table
     if they are not already present.
     Record their equivalence classes in src_elt.
     This way we can insert the corresponding destinations into
     the same classes even if the actual sources are no longer in them
     (having been invalidated).  */

  if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
      && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
    {
      struct table_elt *elt;
      struct table_elt *classp = sets[0].src_elt;
      rtx dest = SET_DEST (sets[0].rtl);
      enum machine_mode eqvmode = GET_MODE (dest);

      if (GET_CODE (dest) == STRICT_LOW_PART)
        {
          eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
          classp = 0;
        }
      if (insert_regs (src_eqv, classp, 0))
        {
          rehash_using_reg (src_eqv);
          src_eqv_hash = HASH (src_eqv, eqvmode);
        }
      elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
      elt->in_memory = src_eqv_in_memory;
      src_eqv_elt = elt;

      /* Check to see if src_eqv_elt is the same as a set source which
         does not yet have an elt, and if so set the elt of the set source
         to src_eqv_elt.  */
      for (i = 0; i < n_sets; i++)
        if (sets[i].rtl && sets[i].src_elt == 0
            && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
          sets[i].src_elt = src_eqv_elt;
    }

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl && ! sets[i].src_volatile
        && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
      {
        if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
          {
            /* REG_EQUAL in setting a STRICT_LOW_PART
               gives an equivalent for the entire destination register,
               not just for the subreg being stored in now.
               This is a more interesting equivalence, so we arrange later
               to treat the entire reg as the destination.  */
            sets[i].src_elt = src_eqv_elt;
            sets[i].src_hash = src_eqv_hash;
          }
        else
          {
            /* Insert source and constant equivalent into hash table, if not
               already present.  */
            struct table_elt *classp = src_eqv_elt;
            rtx src = sets[i].src;
            rtx dest = SET_DEST (sets[i].rtl);
            enum machine_mode mode
              = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);

            /* It's possible that we have a source value known to be
               constant but don't have a REG_EQUAL note on the insn.
               Lack of a note will mean src_eqv_elt will be NULL.  This
               can happen where we've generated a SUBREG to access a
               CONST_INT that is already in a register in a wider mode.
               Ensure that the source expression is put in the proper
               constant class.  */
            if (!classp)
              classp = sets[i].src_const_elt;

            if (sets[i].src_elt == 0)
              {
                struct table_elt *elt;

                /* Note that these insert_regs calls cannot remove
                   any of the src_elt's, because they would have failed to
                   match if not still valid.  */
                if (insert_regs (src, classp, 0))
                  {
                    rehash_using_reg (src);
                    sets[i].src_hash = HASH (src, mode);
                  }
                elt = insert (src, classp, sets[i].src_hash, mode);
                elt->in_memory = sets[i].src_in_memory;
                sets[i].src_elt = classp = elt;
              }
            if (sets[i].src_const && sets[i].src_const_elt == 0
                && src != sets[i].src_const
                && ! rtx_equal_p (sets[i].src_const, src))
              sets[i].src_elt = insert (sets[i].src_const, classp,
                                        sets[i].src_const_hash, mode);
          }
      }
    else if (sets[i].src_elt == 0)
      /* If we did not insert the source into the hash table (e.g., it was
         volatile), note the equivalence class for the REG_EQUAL value, if any,
         so that the destination goes into that class.  */
      sets[i].src_elt = src_eqv_elt;

  /* Record destination addresses in the hash table.  This allows us to
     check if they are invalidated by other sets.  */
  for (i = 0; i < n_sets; i++)
    {
      if (sets[i].rtl)
        {
          rtx x = sets[i].inner_dest;
          struct table_elt *elt;
          enum machine_mode mode;
          unsigned hash;

          if (MEM_P (x))
            {
              x = XEXP (x, 0);
              mode = GET_MODE (x);
              hash = HASH (x, mode);
              elt = lookup (x, hash, mode);
              if (!elt)
                {
                  if (insert_regs (x, NULL, 0))
                    {
                      rtx dest = SET_DEST (sets[i].rtl);

                      rehash_using_reg (x);
                      hash = HASH (x, mode);
                      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
                    }
                  elt = insert (x, NULL, hash, mode);
                }

              sets[i].dest_addr_elt = elt;
            }
          else
            sets[i].dest_addr_elt = NULL;
        }
    }
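
  /* Illustrative use of dest_addr_elt (register numbers invented): for
     (set (mem:SI (reg:SI 100)) (reg:SI 101)) the address (reg:SI 100)
     gets its own hash-table entry here; if another SET or CLOBBER in this
     same insn invalidates reg 100, that entry's class is emptied, and the
     check further down notices first_same_value == 0 and clears
     sets[i].rtl so no stale MEM equivalence is recorded.  */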

  invalidate_from_clobbers (x);

  /* Some registers are invalidated by subroutine calls.  Memory is
     invalidated by non-constant calls.  */

  if (CALL_P (insn))
    {
      if (!(RTL_CONST_OR_PURE_CALL_P (insn)))
        invalidate_memory ();
      invalidate_for_call ();
    }

  /* Now invalidate everything set by this instruction.
     If a SUBREG or other funny destination is being set,
     sets[i].rtl is still nonzero, so here we invalidate the reg
     a part of which is being set.  */

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl)
      {
        /* We can't use the inner dest, because the mode associated with
           a ZERO_EXTRACT is significant.  */
        rtx dest = SET_DEST (sets[i].rtl);

        /* Needed for registers to remove the register from its
           previous quantity's chain.
           Needed for memory if this is a nonvarying address, unless
           we have just done an invalidate_memory that covers even those.  */
        if (REG_P (dest) || GET_CODE (dest) == SUBREG)
          invalidate (dest, VOIDmode);
        else if (MEM_P (dest))
          invalidate (dest, VOIDmode);
        else if (GET_CODE (dest) == STRICT_LOW_PART
                 || GET_CODE (dest) == ZERO_EXTRACT)
          invalidate (XEXP (dest, 0), GET_MODE (dest));
      }

  /* A volatile ASM invalidates everything.  */
  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
      && MEM_VOLATILE_P (PATTERN (insn)))
    flush_hash_table ();

  /* Don't cse over a call to setjmp; on some machines (e.g. VAX)
     the regs restored by the longjmp come from a later time
     than the setjmp.  */
  if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL))
    {
      flush_hash_table ();
      goto done;
    }

  /* Make sure registers mentioned in destinations
     are safe for use in an expression to be inserted.
     This removes from the hash table
     any invalid entry that refers to one of these registers.

     We don't care about the return value from mention_regs because
     we are going to hash the SET_DEST values unconditionally.  */

  for (i = 0; i < n_sets; i++)
    {
      if (sets[i].rtl)
        {
          rtx x = SET_DEST (sets[i].rtl);

          if (!REG_P (x))
            mention_regs (x);
          else
            {
              /* We used to rely on all references to a register becoming
                 inaccessible when a register changes to a new quantity,
                 since that changes the hash code.  However, that is not
                 safe, since after HASH_SIZE new quantities we get a
                 hash 'collision' of a register with its own invalid
                 entries.  And since SUBREGs have been changed not to
                 change their hash code with the hash code of the register,
                 it wouldn't work any longer at all.  So we have to check
                 for any invalid references lying around now.
                 This code is similar to the REG case in mention_regs,
                 but it knows that reg_tick has been incremented, and
                 it leaves reg_in_table as -1.  */
              unsigned int regno = REGNO (x);
              unsigned int endregno = END_REGNO (x);
              unsigned int i;

              for (i = regno; i < endregno; i++)
                {
                  if (REG_IN_TABLE (i) >= 0)
                    {
                      remove_invalid_refs (i);
                      REG_IN_TABLE (i) = -1;
                    }
                }
            }
        }
    }

  /* We may have just removed some of the src_elt's from the hash table.
     So replace each one with the current head of the same class.
     Also check if destination addresses have been removed.  */

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl)
      {
        if (sets[i].dest_addr_elt
            && sets[i].dest_addr_elt->first_same_value == 0)
          {
            /* The elt was removed, which means this destination is not
               valid after this instruction.  */
            sets[i].rtl = NULL_RTX;
          }
        else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
          /* If elt was removed, find current head of same class,
             or 0 if nothing remains of that class.  */
          {
            struct table_elt *elt = sets[i].src_elt;

            while (elt && elt->prev_same_value)
              elt = elt->prev_same_value;

            while (elt && elt->first_same_value == 0)
              elt = elt->next_same_value;
            sets[i].src_elt = elt ? elt->first_same_value : 0;
          }
      }
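
  /* Conceptual example (class contents invented): if sets[i].src_elt
     pointed at an entry for (plus (reg 100) (const_int 4)) whose class
     was partly emptied by remove_invalid_refs above, the two loops walk
     back to the start of the chain and then forward to the first entry
     whose class is still live, so the destination below is inserted into
     a valid equivalence class -- or into none if the whole class died.  */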

  /* Now insert the destinations into their equivalence classes.  */

  for (i = 0; i < n_sets; i++)
    if (sets[i].rtl)
      {
        rtx dest = SET_DEST (sets[i].rtl);
        struct table_elt *elt;

        /* Don't record value if we are not supposed to risk allocating
           floating-point values in registers that might be wider than
           memory.  */
        if ((flag_float_store
             && MEM_P (dest)
             && FLOAT_MODE_P (GET_MODE (dest)))
            /* Don't record BLKmode values, because we don't know the
               size of it, and can't be sure that other BLKmode values
               have the same or smaller size.  */
            || GET_MODE (dest) == BLKmode
            /* If we didn't put a REG_EQUAL value or a source into the hash
               table, there is no point in recording DEST.  */
            || sets[i].src_elt == 0
            /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
               or SIGN_EXTEND, don't record DEST since it can cause
               some tracking to be wrong.

               ??? Think about this more later.  */
            || (paradoxical_subreg_p (dest)
                && (GET_CODE (sets[i].src) == SIGN_EXTEND
                    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
          continue;

        /* STRICT_LOW_PART isn't part of the value BEING set,
           and neither is the SUBREG inside it.
           Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
        if (GET_CODE (dest) == STRICT_LOW_PART)
          dest = SUBREG_REG (XEXP (dest, 0));

        if (REG_P (dest) || GET_CODE (dest) == SUBREG)
          /* Registers must also be inserted into chains for quantities.  */
          if (insert_regs (dest, sets[i].src_elt, 1))
            {
              /* If `insert_regs' changes something, the hash code must be
                 recalculated.  */
              rehash_using_reg (dest);
              sets[i].dest_hash = HASH (dest, GET_MODE (dest));
            }

        elt = insert (dest, sets[i].src_elt,
                      sets[i].dest_hash, GET_MODE (dest));

        /* If this is a constant, insert the constant anchors with the
           equivalent register-offset expressions using register DEST.  */
        if (targetm.const_anchor
            && REG_P (dest)
            && SCALAR_INT_MODE_P (GET_MODE (dest))
            && GET_CODE (sets[i].src_elt->exp) == CONST_INT)
          insert_const_anchors (dest, sets[i].src_elt->exp, GET_MODE (dest));
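
        /* Rough illustration (anchor size and values are hypothetical):
           with a target const_anchor of 0x8000, after
           (set (reg:SI 100) (const_int 0x8004)) the anchors 0x8000 and
           0x10000 can be recorded as equivalent to
           (plus (reg:SI 100) (const_int -4)) and
           (plus (reg:SI 100) (const_int 0x7ffc)), letting later constants
           near this one be synthesized as reg 100 plus a small offset.  */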

        elt->in_memory = (MEM_P (sets[i].inner_dest)
                          && !MEM_READONLY_P (sets[i].inner_dest));

        /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
           narrower than M2, and both M1 and M2 are the same number of words,
           we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
           make that equivalence as well.

           However, BAR may have equivalences for which gen_lowpart
           will produce a simpler value than gen_lowpart applied to
           BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
           BAR's equivalences.  If we don't get a simplified form, make
           the SUBREG.  It will not be used in an equivalence, but will
           cause two similar assignments to be detected.

           Note the loop below will find SUBREG_REG (DEST) since we have
           already entered SRC and DEST of the SET in the table.  */
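
        /* For instance, on a little-endian 64-bit target,
             (set (subreg:DI (reg:SI 100) 0) (reg:DI 101))
           uses one word for both M1 (DImode) and M2 (SImode), so the
           equivalence
             (set (reg:SI 100) (subreg:SI (reg:DI 101) 0))
           is recorded as well (register numbers are only illustrative).  */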

        if (GET_CODE (dest) == SUBREG
            && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
                 / UNITS_PER_WORD)
                == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
            && (GET_MODE_SIZE (GET_MODE (dest))
                >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
            && sets[i].src_elt != 0)
          {
            enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
            struct table_elt *elt, *classp = 0;

            for (elt = sets[i].src_elt->first_same_value; elt;
                 elt = elt->next_same_value)
              {
                rtx new_src = 0;
                unsigned src_hash;
                struct table_elt *src_elt;
                int byte = 0;

                /* Ignore invalid entries.  */
                if (!REG_P (elt->exp)
                    && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
                  continue;

                /* We may have already been playing subreg games.  If the
                   mode is already correct for the destination, use it.  */
                if (GET_MODE (elt->exp) == new_mode)
                  new_src = elt->exp;
                else
                  {
                    /* Calculate big endian correction for the SUBREG_BYTE.
                       We have already checked that M1 (GET_MODE (dest))
                       is not narrower than M2 (new_mode).  */
                    if (BYTES_BIG_ENDIAN)
                      byte = (GET_MODE_SIZE (GET_MODE (dest))
                              - GET_MODE_SIZE (new_mode));

                    new_src = simplify_gen_subreg (new_mode, elt->exp,
                                                   GET_MODE (dest), byte);
                  }

                /* The call to simplify_gen_subreg fails if the value
                   is VOIDmode, yet we can't do any simplification, e.g.
                   for EXPR_LISTs denoting function call results.
                   It is invalid to construct a SUBREG with a VOIDmode
                   SUBREG_REG, hence a zero new_src means we can't do
                   this substitution.  */
                if (! new_src)
                  continue;

                src_hash = HASH (new_src, new_mode);
                src_elt = lookup (new_src, src_hash, new_mode);

                /* Put the new source in the hash table if it isn't
                   already.  */
                if (src_elt == 0)
                  {
                    if (insert_regs (new_src, classp, 0))
                      {
                        rehash_using_reg (new_src);
                        src_hash = HASH (new_src, new_mode);
                      }
                    src_elt = insert (new_src, classp, src_hash, new_mode);
                    src_elt->in_memory = elt->in_memory;
                  }
                else if (classp && classp != src_elt->first_same_value)
                  /* Show that two things that we've seen before are
                     actually the same.  */
                  merge_equiv_classes (src_elt, classp);

                classp = src_elt->first_same_value;
                /* Ignore invalid entries.  */
                while (classp
                       && !REG_P (classp->exp)
                       && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
                  classp = classp->next_same_value;
              }
          }
      }

  /* Special handling for (set REG0 REG1) where REG0 is the
     "cheapest", cheaper than REG1.  After cse, REG1 will probably not
     be used in the sequel, so (if easily done) change this insn to
     (set REG1 REG0) and replace REG1 with REG0 in the previous insn
     that computed their value.  Then REG1 will become a dead store
     and won't cloud the situation for later optimizations.

     Do not make this change if REG1 is a hard register, because it will
     then be used in the sequel and we may be changing a two-operand insn
     into a three-operand insn.

     Also do not do this if we are operating on a copy of INSN.  */
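
  /* For instance (register numbers are only illustrative), the sequence

         (set (reg 200) (plus (reg 150) (reg 151)))
         (set (reg 100) (reg 200))

     where (reg 100) is the cheapest equivalent becomes

         (set (reg 100) (plus (reg 150) (reg 151)))
         (set (reg 200) (reg 100))

     so that the copy into (reg 200) can later be eliminated as a dead
     store.  */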

  if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
      && NEXT_INSN (PREV_INSN (insn)) == insn
      && REG_P (SET_SRC (sets[0].rtl))
      && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
      && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
    {
      int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
      struct qty_table_elem *src_ent = &qty_table[src_q];

      if (src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
        {
          /* Scan for the previous nonnote insn, but stop at a basic
             block boundary.  */
          rtx prev = insn;
          rtx bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
          do
            {
              prev = PREV_INSN (prev);
            }
          while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));

          /* Do not swap the registers around if the previous instruction
             attaches a REG_EQUIV note to REG1.

             ??? It's not entirely clear whether we can transfer a REG_EQUIV
             from the pseudo that originally shadowed an incoming argument
             to another register.  Some uses of REG_EQUIV might rely on it
             being attached to REG1 rather than REG2.

             This section previously turned the REG_EQUIV into a REG_EQUAL
             note.  We cannot do that because REG_EQUIV may provide an
             uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
          if (NONJUMP_INSN_P (prev)
              && GET_CODE (PATTERN (prev)) == SET
              && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
              && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
            {
              rtx dest = SET_DEST (sets[0].rtl);
              rtx src = SET_SRC (sets[0].rtl);
              rtx note;

              validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
              validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
              validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
              apply_change_group ();

              /* If INSN has a REG_EQUAL note, and this note mentions
                 REG0, then we must delete it, because the value in
                 REG0 has changed.  If the note's value is REG1, we must
                 also delete it because that is now this insn's dest.  */
              note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
              if (note != 0
                  && (reg_mentioned_p (dest, XEXP (note, 0))
                      || rtx_equal_p (src, XEXP (note, 0))))
                remove_note (insn, note);
            }
        }
    }

done:;
}

/* Remove from the hash table all expressions that reference memory.  */

static void
invalidate_memory (void)
{
  int i;
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (p->in_memory)
          remove_from_table (p, i);
      }
}

/* Perform invalidation on the basis of everything about an insn
   except for invalidating the actual places that are SET in it.
   This includes the places CLOBBERed, and anything that might
   alias with something that is SET or CLOBBERed.

   X is the pattern of the insn.  */
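
/* As an illustration (register numbers are only illustrative), for a pattern
   such as
     (parallel [(set (reg:SI 100) (reg:SI 101))
                (clobber (reg:CC 17))])
   this removes the hash table entries that depend on the clobbered register,
   while the SET destination itself is invalidated elsewhere in cse_insn.  */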

static void
invalidate_from_clobbers (rtx x)
{
  if (GET_CODE (x) == CLOBBER)
    {
      rtx ref = XEXP (x, 0);
      if (ref)
        {
          if (REG_P (ref) || GET_CODE (ref) == SUBREG
              || MEM_P (ref))
            invalidate (ref, VOIDmode);
          else if (GET_CODE (ref) == STRICT_LOW_PART
                   || GET_CODE (ref) == ZERO_EXTRACT)
            invalidate (XEXP (ref, 0), GET_MODE (ref));
        }
    }
  else if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        {
          rtx y = XVECEXP (x, 0, i);
          if (GET_CODE (y) == CLOBBER)
            {
              rtx ref = XEXP (y, 0);
              if (REG_P (ref) || GET_CODE (ref) == SUBREG
                  || MEM_P (ref))
                invalidate (ref, VOIDmode);
              else if (GET_CODE (ref) == STRICT_LOW_PART
                       || GET_CODE (ref) == ZERO_EXTRACT)
                invalidate (XEXP (ref, 0), GET_MODE (ref));
            }
        }
    }
}

/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
   and replace any registers in them with either an equivalent constant
   or the canonical form of the register.  If we are inside an address,
   only do this if the address remains valid.

   OBJECT is 0 except when within a MEM in which case it is the MEM.

   Return the replacement for X.  */
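
/* For instance (names purely illustrative), if the quantity table records
   that (reg:SI 150) currently holds (symbol_ref:SI "x"), a REG_EQUAL note
   whose value is (plus:SI (reg:SI 150) (const_int 4)) is rewritten to
   mention (plus:SI (symbol_ref:SI "x") (const_int 4)) instead.  */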

static rtx
cse_process_notes_1 (rtx x, rtx object, bool *changed)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case PC:
    case CC0:
    case LO_SUM:
      return x;

    case MEM:
      validate_change (x, &XEXP (x, 0),
                       cse_process_notes (XEXP (x, 0), x, changed), 0);
      return x;

    case EXPR_LIST:
    case INSN_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL)
        XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed);
      if (XEXP (x, 1))
        XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed);
      return x;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case SUBREG:
      {
        rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
        /* We don't substitute VOIDmode constants into these rtx,
           since they would impede folding.  */
        if (GET_MODE (new_rtx) != VOIDmode)
          validate_change (object, &XEXP (x, 0), new_rtx, 0);
        return x;
      }

    case REG:
      i = REG_QTY (REGNO (x));

      /* Return a constant or a constant register.  */
      if (REGNO_QTY_VALID_P (REGNO (x)))
        {
          struct qty_table_elem *ent = &qty_table[i];

          if (ent->const_rtx != NULL_RTX
              && (CONSTANT_P (ent->const_rtx)
                  || REG_P (ent->const_rtx)))
            {
              rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
              if (new_rtx)
                return copy_rtx (new_rtx);
            }
        }

      /* Otherwise, canonicalize this register.  */
      return canon_reg (x, NULL_RTX);

    default:
      break;
    }

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      validate_change (object, &XEXP (x, i),
                       cse_process_notes (XEXP (x, i), object, changed), 0);

  return x;
}

static rtx
cse_process_notes (rtx x, rtx object, bool *changed)
{
  rtx new_rtx = cse_process_notes_1 (x, object, changed);
  if (new_rtx != x)
    *changed = true;
  return new_rtx;
}


/* Find a path in the CFG, starting with FIRST_BB to perform CSE on.

   DATA is a pointer to a struct cse_basic_block_data, that is used to
   describe the path.
   It is filled with a queue of basic blocks, starting with FIRST_BB
   and following a trace through the CFG.

   If all paths starting at FIRST_BB have been followed, or no new path
   starting at FIRST_BB can be constructed, this function returns FALSE.
   Otherwise, DATA->path is filled and the function returns TRUE indicating
   that a path to follow was found.

   If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
   block in the path will be FIRST_BB.  */
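
/* For example (block numbers illustrative): in a diamond where bb2 ends in a
   conditional jump to bb4 and falls through to bb3, with bb3 and bb4 both
   rejoining at bb5, the first call starting at bb2 can return the path
   bb2 -> bb4 by following the branch edge.  The next call backtracks and
   returns bb2 -> bb3 along the fallthru edge.  bb5 is never appended to
   either path because it has two predecessors.  */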

static bool
cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
               int follow_jumps)
{
  basic_block bb;
  edge e;
  int path_size;

  SET_BIT (cse_visited_basic_blocks, first_bb->index);

  /* See if there is a previous path.  */
  path_size = data->path_size;

  /* There is a previous path.  Make sure it started with FIRST_BB.  */
  if (path_size)
    gcc_assert (data->path[0].bb == first_bb);

  /* There was only one basic block in the last path.  Clear the path and
     return, so that paths starting at another basic block can be tried.  */
  if (path_size == 1)
    {
      path_size = 0;
      goto done;
    }

  /* If the path was empty from the beginning, construct a new path.  */
  if (path_size == 0)
    data->path[path_size++].bb = first_bb;
  else
    {
      /* Otherwise, path_size must be equal to or greater than 2, because
         a previous path exists that is at least two basic blocks long.

         Update the previous branch path, if any.  If the last branch was
         previously along the branch edge, take the fallthrough edge now.  */
      while (path_size >= 2)
        {
          basic_block last_bb_in_path, previous_bb_in_path;
          edge e;

          --path_size;
          last_bb_in_path = data->path[path_size].bb;
          previous_bb_in_path = data->path[path_size - 1].bb;

          /* If we previously followed a path along the branch edge, try
             the fallthru edge now.  */
          if (EDGE_COUNT (previous_bb_in_path->succs) == 2
              && any_condjump_p (BB_END (previous_bb_in_path))
              && (e = find_edge (previous_bb_in_path, last_bb_in_path))
              && e == BRANCH_EDGE (previous_bb_in_path))
            {
              bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
              if (bb != EXIT_BLOCK_PTR
                  && single_pred_p (bb)
                  /* We used to assert here that we would only see blocks
                     that we have not visited yet.  But we may end up
                     visiting basic blocks twice if the CFG has changed
                     in this run of cse_main, because when the CFG changes
                     the topological sort of the CFG also changes.  A basic
                     block that previously had more than one predecessor
                     may now have a single predecessor, and become part of
                     a path that starts at another basic block.

                     We still want to visit each basic block only once, so
                     halt the path here if we have already visited BB.  */
                  && !TEST_BIT (cse_visited_basic_blocks, bb->index))
                {
                  SET_BIT (cse_visited_basic_blocks, bb->index);
                  data->path[path_size++].bb = bb;
                  break;
                }
            }

          data->path[path_size].bb = NULL;
        }

      /* If only one block remains in the path, bail.  */
      if (path_size == 1)
        {
          path_size = 0;
          goto done;
        }
    }

  /* Extend the path if possible.  */
  if (follow_jumps)
    {
      bb = data->path[path_size - 1].bb;
      while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH))
        {
          if (single_succ_p (bb))
            e = single_succ_edge (bb);
          else if (EDGE_COUNT (bb->succs) == 2
                   && any_condjump_p (BB_END (bb)))
            {
              /* First try to follow the branch.  If that doesn't lead
                 to a useful path, follow the fallthru edge.  */
              e = BRANCH_EDGE (bb);
              if (!single_pred_p (e->dest))
                e = FALLTHRU_EDGE (bb);
            }
          else
            e = NULL;

          if (e
              && !((e->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
              && e->dest != EXIT_BLOCK_PTR
              && single_pred_p (e->dest)
              /* Avoid visiting basic blocks twice.  The large comment
                 above explains why this can happen.  */
              && !TEST_BIT (cse_visited_basic_blocks, e->dest->index))
            {
              basic_block bb2 = e->dest;
              SET_BIT (cse_visited_basic_blocks, bb2->index);
              data->path[path_size++].bb = bb2;
              bb = bb2;
            }
          else
            bb = NULL;
        }
    }

done:
  data->path_size = path_size;
  return path_size != 0;
}

/* Dump the path in DATA to file F.  NSETS is the number of sets
   in the path.  */

static void
cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
{
  int path_entry;

  fprintf (f, ";; Following path with %d sets: ", nsets);
  for (path_entry = 0; path_entry < data->path_size; path_entry++)
    fprintf (f, "%d ", (data->path[path_entry].bb)->index);
  fputc ('\n', f);
  fflush (f);
}


/* Return true if BB has exception handling successor edges.  */

static bool
have_eh_succ_edges (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->flags & EDGE_EH)
      return true;

  return false;
}


/* Scan to the end of the path described by DATA.  Record in DATA->nsets
   an estimate of the total number of SETs of all insns in the path.  */
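
/* For instance, an insn whose pattern is
     (parallel [(set ...) (set ...) (clobber ...)])
   contributes 3 (its XVECLEN) to the count even though it contains only two
   SETs, which is why the result is only an estimate.  */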

static void
cse_prescan_path (struct cse_basic_block_data *data)
{
  int nsets = 0;
  int path_size = data->path_size;
  int path_entry;

  /* Scan to end of each basic block in the path.  */
  for (path_entry = 0; path_entry < path_size; path_entry++)
    {
      basic_block bb;
      rtx insn;

      bb = data->path[path_entry].bb;

      FOR_BB_INSNS (bb, insn)
        {
          if (!INSN_P (insn))
            continue;

          /* A PARALLEL can have lots of SETs in it,
             especially if it is really an ASM_OPERANDS.  */
          if (GET_CODE (PATTERN (insn)) == PARALLEL)
            nsets += XVECLEN (PATTERN (insn), 0);
          else
            nsets += 1;
        }
    }

  data->nsets = nsets;
}

/* Process a single extended basic block described by EBB_DATA.  */

static void
cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
{
  int path_size = ebb_data->path_size;
  int path_entry;
  int num_insns = 0;

  /* Allocate the space needed by qty_table.  */
  qty_table = XNEWVEC (struct qty_table_elem, max_qty);

  new_basic_block ();
  cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
  cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
  for (path_entry = 0; path_entry < path_size; path_entry++)
    {
      basic_block bb;
      rtx insn;

      bb = ebb_data->path[path_entry].bb;

      /* Invalidate recorded information for eh regs if there is an EH
         edge pointing to that bb.  */
      if (bb_has_eh_pred (bb))
        {
          df_ref *def_rec;

          for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
            {
              df_ref def = *def_rec;
              if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
                invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
            }
        }

      optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
      FOR_BB_INSNS (bb, insn)
        {
          /* If we have processed 1,000 insns, flush the hash table to
             avoid extreme quadratic behavior.  We must not include NOTEs
             in the count since there may be more of them when generating
             debugging information.  If we clear the table at different
             times, code generated with -g -O might be different than code
             generated with -O but not -g.

             FIXME: This is a real kludge and needs to be done some other
                    way.  */
          if (NONDEBUG_INSN_P (insn)
              && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
            {
              flush_hash_table ();
              num_insns = 0;
            }

          if (INSN_P (insn))
            {
              /* Process notes first so we have all notes in canonical forms
                 when looking for duplicate operations.  */
              if (REG_NOTES (insn))
                {
                  bool changed = false;
                  REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
                                                        NULL_RTX, &changed);
                  if (changed)
                    df_notes_rescan (insn);
                }

              cse_insn (insn);

              /* If we haven't already found an insn where we added a LABEL_REF,
                 check this one.  */
              if (INSN_P (insn) && !recorded_label_ref
                  && for_each_rtx (&PATTERN (insn), check_for_label_ref,
                                   (void *) insn))
                recorded_label_ref = true;

#ifdef HAVE_cc0
              if (NONDEBUG_INSN_P (insn))
                {
                  /* If the previous insn sets CC0 and this insn no
                     longer references CC0, delete the previous insn.
                     Here we use the fact that nothing expects CC0 to be
                     valid over an insn, which is true until the final
                     pass.  */
                  rtx prev_insn, tem;

                  prev_insn = prev_nonnote_nondebug_insn (insn);
                  if (prev_insn && NONJUMP_INSN_P (prev_insn)
                      && (tem = single_set (prev_insn)) != NULL_RTX
                      && SET_DEST (tem) == cc0_rtx
                      && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
                    delete_insn (prev_insn);

                  /* If this insn is not the last insn in the basic
                     block, it will be PREV_INSN(insn) in the next
                     iteration.  If we recorded any CC0-related
                     information for this insn, remember it.  */
                  if (insn != BB_END (bb))
                    {
                      prev_insn_cc0 = this_insn_cc0;
                      prev_insn_cc0_mode = this_insn_cc0_mode;
                    }
                }
#endif
            }
        }

      /* With non-call exceptions, we are not always able to update
         the CFG properly inside cse_insn.  So clean up possibly
         redundant EH edges here.  */
      if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb))
        cse_cfg_altered |= purge_dead_edges (bb);

      /* If we changed a conditional jump, we may have terminated
         the path we are following.  Check that by verifying that
         the edge we would take still exists.  If the edge does
         not exist anymore, purge the remainder of the path.
         Note that this will cause us to return to the caller.  */
      if (path_entry < path_size - 1)
        {
          basic_block next_bb = ebb_data->path[path_entry + 1].bb;
          if (!find_edge (bb, next_bb))
            {
              do
                {
                  path_size--;

                  /* If we truncate the path, we must also reset the
                     visited bit on the remaining blocks in the path,
                     or we will never visit them at all.  */
                  RESET_BIT (cse_visited_basic_blocks,
                             ebb_data->path[path_size].bb->index);
                  ebb_data->path[path_size].bb = NULL;
                }
              while (path_size - 1 != path_entry);
              ebb_data->path_size = path_size;
            }
        }

      /* If this is a conditional jump insn, record any known
         equivalences due to the condition being tested.  */
      insn = BB_END (bb);
      if (path_entry < path_size - 1
          && JUMP_P (insn)
          && single_set (insn)
          && any_condjump_p (insn))
        {
          basic_block next_bb = ebb_data->path[path_entry + 1].bb;
          bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
          record_jump_equiv (insn, taken);
        }

#ifdef HAVE_cc0
      /* Clear the CC0-tracking state; it can't provide useful information
         across basic block boundaries.  */
      prev_insn_cc0 = 0;
#endif
    }

  gcc_assert (next_qty <= max_qty);

  free (qty_table);
}


/* Perform cse on the instructions of a function.
   F is the first instruction.
   NREGS is one plus the highest pseudo-reg number used in the function.

   Return 2 if jump optimizations should be redone due to simplifications
   in conditional jump instructions.
   Return 1 if the CFG should be cleaned up because it has been modified.
   Return 0 otherwise.  */
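
/* For example, the value 2 is returned when either cse_jumps_altered or
   recorded_label_ref was set while processing the paths (say, because
   check_for_label_ref below found a LABEL_REF lacking a REG_LABEL_OPERAND
   note), and 1 is returned when only cse_cfg_altered was set, e.g. by
   purge_dead_edges in cse_extended_basic_block.  */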

int
cse_main (rtx f ATTRIBUTE_UNUSED, int nregs)
{
  struct cse_basic_block_data ebb_data;
  basic_block bb;
  int *rc_order = XNEWVEC (int, last_basic_block);
  int i, n_blocks;

  df_set_flags (DF_LR_RUN_DCE);
  df_analyze ();
  df_set_flags (DF_DEFER_INSN_RESCAN);

  reg_scan (get_insns (), max_reg_num ());
  init_cse_reg_info (nregs);

  ebb_data.path = XNEWVEC (struct branch_path,
                           PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));

  cse_cfg_altered = false;
  cse_jumps_altered = false;
  recorded_label_ref = false;
  constant_pool_entries_cost = 0;
  constant_pool_entries_regcost = 0;
  ebb_data.path_size = 0;
  ebb_data.nsets = 0;
  rtl_hooks = cse_rtl_hooks;

  init_recog ();
  init_alias_analysis ();

  reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);

  /* Set up the table of already visited basic blocks.  */
  cse_visited_basic_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (cse_visited_basic_blocks);

  /* Loop over basic blocks in reverse post-order (RPO),
     excluding the ENTRY and EXIT blocks.  */
  n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
  i = 0;
  while (i < n_blocks)
    {
      /* Find the first block in the RPO queue that we have not yet
         processed before.  */
      do
        {
          bb = BASIC_BLOCK (rc_order[i++]);
        }
      while (TEST_BIT (cse_visited_basic_blocks, bb->index)
             && i < n_blocks);

      /* Find all paths starting with BB, and process them.  */
      while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
        {
          /* Pre-scan the path.  */
          cse_prescan_path (&ebb_data);

          /* If this basic block has no sets, skip it.  */
          if (ebb_data.nsets == 0)
            continue;

          /* Get a reasonable estimate for the maximum number of qty's
             needed for this path.  For this, we take the number of sets
             and multiply that by MAX_RECOG_OPERANDS.  */
          max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;

          /* Dump the path we're about to process.  */
          if (dump_file)
            cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);

          cse_extended_basic_block (&ebb_data);
        }
    }

  /* Clean up.  */
  end_alias_analysis ();
  free (reg_eqv_table);
  free (ebb_data.path);
  sbitmap_free (cse_visited_basic_blocks);
  free (rc_order);
  rtl_hooks = general_rtl_hooks;

  if (cse_jumps_altered || recorded_label_ref)
    return 2;
  else if (cse_cfg_altered)
    return 1;
  else
    return 0;
}

/* Called via for_each_rtx to see if an insn is using a LABEL_REF for
   which there isn't a REG_LABEL_OPERAND note.
   Return one if so.  DATA is the insn.  */
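
/* This typically fires (illustrative example) when cse has substituted an
   equivalent expression containing (label_ref 23) into some insn's pattern:
   the insn now uses the CODE_LABEL but carries no REG_LABEL_OPERAND note
   for it yet, so jump must be rerun to add one.  */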

static int
check_for_label_ref (rtx *rtl, void *data)
{
  rtx insn = (rtx) data;

  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
     note for it, we must rerun jump since it needs to place the note.  If
     this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
     don't do this since no REG_LABEL_OPERAND will be added.  */
  return (GET_CODE (*rtl) == LABEL_REF
          && ! LABEL_REF_NONLOCAL_P (*rtl)
          && (!JUMP_P (insn)
              || !label_is_jump_target_p (XEXP (*rtl, 0), insn))
          && LABEL_P (XEXP (*rtl, 0))
          && INSN_UID (XEXP (*rtl, 0)) != 0
          && ! find_reg_note (insn, REG_LABEL_OPERAND, XEXP (*rtl, 0)));
}

/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count; INCR is how much
   we count each register usage.

   Don't count a usage of DEST, which is the SET_DEST of a SET which
   contains X in its SET_SRC.  This is because such a SET does not
   modify the liveness of DEST.
   DEST is set to pc_rtx for a trapping insn, or for an insn with side effects.
   We must then count uses of a SET_DEST regardless, because the insn can't be
   deleted here.  */
 | 
      
         | 6517 |  |  |  
 | 
      
         | 6518 |  |  | static void
 | 
      
         | 6519 |  |  | count_reg_usage (rtx x, int *counts, rtx dest, int incr)
 | 
      
         | 6520 |  |  | {
 | 
      
         | 6521 |  |  |   enum rtx_code code;
 | 
      
         | 6522 |  |  |   rtx note;
 | 
      
         | 6523 |  |  |   const char *fmt;
 | 
      
         | 6524 |  |  |   int i, j;
 | 
      
         | 6525 |  |  |  
 | 
      
         | 6526 |  |  |   if (x == 0)
 | 
      
         | 6527 |  |  |     return;
 | 
      
         | 6528 |  |  |  
 | 
      
         | 6529 |  |  |   switch (code = GET_CODE (x))
 | 
      
         | 6530 |  |  |     {
 | 
      
         | 6531 |  |  |     case REG:
 | 
      
         | 6532 |  |  |       if (x != dest)
 | 
      
         | 6533 |  |  |         counts[REGNO (x)] += incr;
 | 
      
         | 6534 |  |  |       return;
 | 
      
         | 6535 |  |  |  
 | 
      
         | 6536 |  |  |     case PC:
 | 
      
         | 6537 |  |  |     case CC0:
 | 
      
         | 6538 |  |  |     case CONST:
 | 
      
         | 6539 |  |  |     case CONST_INT:
 | 
      
         | 6540 |  |  |     case CONST_DOUBLE:
 | 
      
         | 6541 |  |  |     case CONST_FIXED:
 | 
      
         | 6542 |  |  |     case CONST_VECTOR:
 | 
      
         | 6543 |  |  |     case SYMBOL_REF:
 | 
      
         | 6544 |  |  |     case LABEL_REF:
 | 
      
         | 6545 |  |  |       return;
 | 
      
         | 6546 |  |  |  
 | 
      
         | 6547 |  |  |     case CLOBBER:
 | 
      
         | 6548 |  |  |       /* If we are clobbering a MEM, mark any registers inside the address
 | 
      
         | 6549 |  |  |          as being used.  */
 | 
      
         | 6550 |  |  |       if (MEM_P (XEXP (x, 0)))
 | 
      
         | 6551 |  |  |         count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
 | 
      
         | 6552 |  |  |       return;
 | 
      
         | 6553 |  |  |  
 | 
      
         | 6554 |  |  |     case SET:
 | 
      
         | 6555 |  |  |       /* Unless we are setting a REG, count everything in SET_DEST.  */
 | 
      
         | 6556 |  |  |       if (!REG_P (SET_DEST (x)))
 | 
      
         | 6557 |  |  |         count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
 | 
      
         | 6558 |  |  |       count_reg_usage (SET_SRC (x), counts,
 | 
      
         | 6559 |  |  |                        dest ? dest : SET_DEST (x),
 | 
      
         | 6560 |  |  |                        incr);
 | 
      
         | 6561 |  |  |       return;
 | 
      
         | 6562 |  |  |  
 | 
      
         | 6563 |  |  |     case DEBUG_INSN:
 | 
      
         | 6564 |  |  |       return;
 | 
      
         | 6565 |  |  |  
 | 
      
         | 6566 |  |  |     case CALL_INSN:
 | 
      
         | 6567 |  |  |     case INSN:
 | 
      
         | 6568 |  |  |     case JUMP_INSN:
 | 
      
         | 6569 |  |  |       /* We expect dest to be NULL_RTX here.  If the insn may trap,
 | 
      
         | 6570 |  |  |          or if it cannot be deleted due to side-effects, mark this fact
 | 
      
         | 6571 |  |  |          by setting DEST to pc_rtx.  */
 | 
      
         | 6572 |  |  |       if (insn_could_throw_p (x) || side_effects_p (PATTERN (x)))
 | 
      
         | 6573 |  |  |         dest = pc_rtx;
 | 
      
         | 6574 |  |  |       if (code == CALL_INSN)
 | 
      
         | 6575 |  |  |         count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
 | 
      
         | 6576 |  |  |       count_reg_usage (PATTERN (x), counts, dest, incr);
 | 
      
         | 6577 |  |  |  
 | 
      
         | 6578 |  |  |       /* Things used in a REG_EQUAL note aren't dead since loop may try to
 | 
      
         | 6579 |  |  |          use them.  */
 | 
      
         | 6580 |  |  |  
 | 
      
         | 6581 |  |  |       note = find_reg_equal_equiv_note (x);
 | 
      
         | 6582 |  |  |       if (note)
 | 
      
         | 6583 |  |  |         {
 | 
      
         | 6584 |  |  |           rtx eqv = XEXP (note, 0);
 | 
      
         | 6585 |  |  |  
 | 
      
         | 6586 |  |  |           if (GET_CODE (eqv) == EXPR_LIST)
 | 
      
         | 6587 |  |  |           /* This REG_EQUAL note describes the result of a function call.
 | 
      
         | 6588 |  |  |              Process all the arguments.  */
 | 
      
         | 6589 |  |  |             do
 | 
      
         | 6590 |  |  |               {
 | 
      
         | 6591 |  |  |                 count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
 | 
      
         | 6592 |  |  |                 eqv = XEXP (eqv, 1);
 | 
      
         | 6593 |  |  |               }
 | 
      
         | 6594 |  |  |             while (eqv && GET_CODE (eqv) == EXPR_LIST);
 | 
      
         | 6595 |  |  |           else
 | 
      
         | 6596 |  |  |             count_reg_usage (eqv, counts, dest, incr);
 | 
      
         | 6597 |  |  |         }
 | 
      
         | 6598 |  |  |       return;
 | 
      
         | 6599 |  |  |  
 | 
      
         | 6600 |  |  |     case EXPR_LIST:
 | 
      
         | 6601 |  |  |       if (REG_NOTE_KIND (x) == REG_EQUAL
 | 
      
         | 6602 |  |  |           || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
 | 
      
         | 6603 |  |  |           /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
 | 
      
         | 6604 |  |  |              involving registers in the address.  */
 | 
      
         | 6605 |  |  |           || GET_CODE (XEXP (x, 0)) == CLOBBER)
 | 
      
         | 6606 |  |  |         count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
 | 
      
         | 6607 |  |  |  
 | 
      
         | 6608 |  |  |       count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
 | 
      
         | 6609 |  |  |       return;
 | 
      
         | 6610 |  |  |  
 | 
      
         | 6611 |  |  |     case ASM_OPERANDS:
 | 
      
         | 6612 |  |  |       /* Iterate over just the inputs, not the constraints as well.  */
 | 
      
         | 6613 |  |  |       for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
 | 
      
         | 6614 |  |  |         count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
 | 
      
         | 6615 |  |  |       return;
 | 
      
         | 6616 |  |  |  
 | 
      
         | 6617 |  |  |     case INSN_LIST:
 | 
      
         | 6618 |  |  |       gcc_unreachable ();
 | 
      
         | 6619 |  |  |  
 | 
      
         | 6620 |  |  |     default:
 | 
      
         | 6621 |  |  |       break;
 | 
      
         | 6622 |  |  |     }
 | 
      
         | 6623 |  |  |  
 | 
      
         | 6624 |  |  |   fmt = GET_RTX_FORMAT (code);
 | 
      
         | 6625 |  |  |   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
 | 
      
         | 6626 |  |  |     {
 | 
      
         | 6627 |  |  |       if (fmt[i] == 'e')
 | 
      
         | 6628 |  |  |         count_reg_usage (XEXP (x, i), counts, dest, incr);
 | 
      
         | 6629 |  |  |       else if (fmt[i] == 'E')
 | 
      
         | 6630 |  |  |         for (j = XVECLEN (x, i) - 1; j >= 0; j--)
 | 
      
         | 6631 |  |  |           count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
 | 
      
         | 6632 |  |  |     }
 | 
      
         | 6633 |  |  | }
 | 
      
         | 6634 |  |  |  
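/* For illustration (a schematic example; the pseudo number is made up):
   the DEST argument keeps a register whose only "use" is computing its own
   new value from being counted.  For a dead increment

       (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 1)))

   count_reg_usage is called on the SET_SRC with DEST == (reg:SI 100);
   since pseudo REGs are shared rtl objects, the X != DEST test above skips
   the inner use, so pseudo 100 keeps a zero use count and the insn can
   still be deleted as trivially dead.  */
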
/* Return true if X is a dead register.  */

static inline int
is_dead_reg (rtx x, int *counts)
{
  return (REG_P (x)
	  && REGNO (x) >= FIRST_PSEUDO_REGISTER
	  && counts[REGNO (x)] == 0);
}

/* Return true if set is live.  */
static bool
set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
	    int *counts)
{
#ifdef HAVE_cc0
  rtx tem;
#endif

  if (set_noop_p (set))
    ;

#ifdef HAVE_cc0
  else if (GET_CODE (SET_DEST (set)) == CC0
	   && !side_effects_p (SET_SRC (set))
	   && ((tem = next_nonnote_nondebug_insn (insn)) == NULL_RTX
	       || !INSN_P (tem)
	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
    return false;
#endif
  else if (!is_dead_reg (SET_DEST (set), counts)
	   || side_effects_p (SET_SRC (set)))
    return true;
  return false;
}

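/* For illustration (a schematic example on a hypothetical HAVE_cc0 target;
   the pseudo number is made up): the cc0 branch above lets a comparison
   whose user has disappeared be treated as dead.  Given

       (set (cc0) (compare (reg:SI 100) (const_int 0)))

   with no following real insn referencing cc0 (say the conditional jump
   that consumed it was already deleted), set_live_p returns false and the
   comparison can be removed.  */
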
/* Return true if insn is live.  */

static bool
insn_live_p (rtx insn, int *counts)
{
  int i;
  if (insn_could_throw_p (insn))
    return true;
  else if (GET_CODE (PATTERN (insn)) == SET)
    return set_live_p (PATTERN (insn), insn, counts);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  if (GET_CODE (elt) == SET)
	    {
	      if (set_live_p (elt, insn, counts))
		return true;
	    }
	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
	    return true;
	}
      return false;
    }
  else if (DEBUG_INSN_P (insn))
    {
      rtx next;

      for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next))
	if (NOTE_P (next))
	  continue;
	else if (!DEBUG_INSN_P (next))
	  return true;
	else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next))
	  return false;

      return true;
    }
  else
    return true;
}

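/* For illustration (a schematic example; the decl name and pseudo number
   are made up): the DEBUG_INSN case above treats a debug bind as dead when
   a later bind of the same variable appears before any real insn:

       (debug_insn (var_location x (reg:SI 100)))    <-- dead, superseded
       (debug_insn (var_location x (const_int 0)))   <-- live

   Since no real insn lies between the two binds, the intermediate location
   of "x" is never the current one anywhere, so the first bind can go.  */
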
/* Count the number of stores into a pseudo.  Callback for note_stores.  */

static void
count_stores (rtx x, const_rtx set ATTRIBUTE_UNUSED, void *data)
{
  int *counts = (int *) data;
  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
    counts[REGNO (x)]++;
}

struct dead_debug_insn_data
{
  int *counts;
  rtx *replacements;
  bool seen_repl;
};

/* Return 1 if a DEBUG_INSN needs to be reset because some dead
   pseudo doesn't have a replacement.  Callback for for_each_rtx.  */

static int
is_dead_debug_insn (rtx *loc, void *data)
{
  rtx x = *loc;
  struct dead_debug_insn_data *ddid = (struct dead_debug_insn_data *) data;

  if (is_dead_reg (x, ddid->counts))
    {
      if (ddid->replacements && ddid->replacements[REGNO (x)] != NULL_RTX)
	ddid->seen_repl = true;
      else
	return 1;
    }
  return 0;
}

/* Replace a dead pseudo in a DEBUG_INSN with replacement DEBUG_EXPR.
   Callback for simplify_replace_fn_rtx.  */

static rtx
replace_dead_reg (rtx x, const_rtx old_rtx ATTRIBUTE_UNUSED, void *data)
{
  rtx *replacements = (rtx *) data;

  if (REG_P (x)
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && replacements[REGNO (x)] != NULL_RTX)
    {
      if (GET_MODE (x) == GET_MODE (replacements[REGNO (x)]))
	return replacements[REGNO (x)];
      return lowpart_subreg (GET_MODE (x), replacements[REGNO (x)],
			     GET_MODE (replacements[REGNO (x)]));
    }
  return NULL_RTX;
}

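/* For illustration (schematic; the pseudo and D#1 numbers are made up):
   when a dead pseudo in a debug expression has a recorded DEBUG_EXPR
   replacement, it is substituted directly if the modes agree, e.g.
   (reg:SI 100) becomes (debug_expr:SI D#1).  If the debug use is in a
   narrower mode, a lowpart subreg of the replacement is used instead,
   e.g. (subreg:QI (debug_expr:SI D#1) 0) on a little-endian target.  */
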
/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.  */

int
delete_trivially_dead_insns (rtx insns, int nreg)
{
  int *counts;
  rtx insn, prev;
  rtx *replacements = NULL;
  int ndead = 0;

  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
  /* First count the number of times each register is used.  */
  if (MAY_HAVE_DEBUG_INSNS)
    {
      counts = XCNEWVEC (int, nreg * 3);
      for (insn = insns; insn; insn = NEXT_INSN (insn))
	if (DEBUG_INSN_P (insn))
	  count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
			   NULL_RTX, 1);
	else if (INSN_P (insn))
	  {
	    count_reg_usage (insn, counts, NULL_RTX, 1);
	    note_stores (PATTERN (insn), count_stores, counts + nreg * 2);
	  }
      /* If there can be debug insns, COUNTS are 3 consecutive arrays.
	 First one counts how many times each pseudo is used outside
	 of debug insns, second counts how many times each pseudo is
	 used in debug insns and third counts how many times a pseudo
	 is stored.  */
    }
  else
    {
      counts = XCNEWVEC (int, nreg);
      for (insn = insns; insn; insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  count_reg_usage (insn, counts, NULL_RTX, 1);
      /* If no debug insns can be present, COUNTS is just an array
	 which counts how many times each pseudo is used.  */
    }
  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.

     The first jump optimization pass may leave a real insn as the last
     insn in the function.  We must not skip that insn or we may end
     up deleting code that is not really dead.

     If some otherwise unused register is only used in DEBUG_INSNs,
     try to create a DEBUG_EXPR temporary and emit a DEBUG_INSN before
     the setter.  Then go through DEBUG_INSNs and if a DEBUG_EXPR
     has been created for the unused register, replace it with
     the DEBUG_EXPR, otherwise reset the DEBUG_INSN.  */
  for (insn = get_last_insn (); insn; insn = prev)
    {
      int live_insn = 0;

      prev = PREV_INSN (insn);
      if (!INSN_P (insn))
	continue;

      live_insn = insn_live_p (insn, counts);

      /* If this is a dead insn, delete it and show registers in it aren't
	 being used.  */

      if (! live_insn && dbg_cnt (delete_trivial_dead))
	{
	  if (DEBUG_INSN_P (insn))
	    count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
			     NULL_RTX, -1);
	  else
	    {
	      rtx set;
	      if (MAY_HAVE_DEBUG_INSNS
		  && (set = single_set (insn)) != NULL_RTX
		  && is_dead_reg (SET_DEST (set), counts)
		  /* Used at least once in some DEBUG_INSN.  */
		  && counts[REGNO (SET_DEST (set)) + nreg] > 0
		  /* And set exactly once.  */
		  && counts[REGNO (SET_DEST (set)) + nreg * 2] == 1
		  && !side_effects_p (SET_SRC (set))
		  && asm_noperands (PATTERN (insn)) < 0)
		{
		  rtx dval, bind;

		  /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
		  dval = make_debug_expr_from_rtl (SET_DEST (set));

		  /* Emit a debug bind insn before the insn in which
		     reg dies.  */
		  bind = gen_rtx_VAR_LOCATION (GET_MODE (SET_DEST (set)),
					       DEBUG_EXPR_TREE_DECL (dval),
					       SET_SRC (set),
					       VAR_INIT_STATUS_INITIALIZED);
		  count_reg_usage (bind, counts + nreg, NULL_RTX, 1);

		  bind = emit_debug_insn_before (bind, insn);
		  df_insn_rescan (bind);

		  if (replacements == NULL)
		    replacements = XCNEWVEC (rtx, nreg);
		  replacements[REGNO (SET_DEST (set))] = dval;
		}

	      count_reg_usage (insn, counts, NULL_RTX, -1);
	      ndead++;
	    }
	  delete_insn_and_edges (insn);
	}
    }

  if (MAY_HAVE_DEBUG_INSNS)
    {
      struct dead_debug_insn_data ddid;
      ddid.counts = counts;
      ddid.replacements = replacements;
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	if (DEBUG_INSN_P (insn))
	  {
	    /* If this debug insn references a dead register that wasn't replaced
	       with a DEBUG_EXPR, reset the DEBUG_INSN.  */
	    ddid.seen_repl = false;
	    if (for_each_rtx (&INSN_VAR_LOCATION_LOC (insn),
			      is_dead_debug_insn, &ddid))
	      {
		INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
		df_insn_rescan (insn);
	      }
	    else if (ddid.seen_repl)
	      {
		INSN_VAR_LOCATION_LOC (insn)
		  = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
					     NULL_RTX, replace_dead_reg,
					     replacements);
		df_insn_rescan (insn);
	      }
	  }
      free (replacements);
    }

  if (dump_file && ndead)
    fprintf (dump_file, "Deleted %i trivially dead insns\n",
	     ndead);
  /* Clean up.  */
  free (counts);
  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
  return ndead;
}

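/* For illustration (a schematic walk through the debug-preserving path
   above; the pseudo numbers, decl name and D#1 are made up).  Suppose
   pseudo 130 is set exactly once, has no uses outside debug insns, and one
   debug use:

       (insn (set (reg:SI 130) (plus:SI (reg:SI 131) (const_int 8))))
       (debug_insn (var_location i (reg:SI 130)))

   The deletion loop emits a debug bind of a fresh DEBUG_EXPR to the dead
   SET_SRC just before the setter and then deletes the setter:

       (debug_insn (var_location D#1 (plus:SI (reg:SI 131) (const_int 8))))

   recording D#1 in REPLACEMENTS[130].  The final MAY_HAVE_DEBUG_INSNS
   sweep rewrites the debug use of (reg:SI 130) to D#1, or resets the debug
   insn to UNKNOWN_VAR_LOC if no replacement was recorded.  With debug
   insns enabled, COUNTS holds three arrays of NREG entries: COUNTS[r] for
   nondebug uses, COUNTS[r + nreg] for debug uses and COUNTS[r + nreg * 2]
   for stores, which is what the three conditions on SET_DEST check.  */
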
/* This function is called via for_each_rtx.  The argument, NEWREG, is
   a condition code register with the desired mode.  If we are looking
   at the same register in a different mode, replace it with
   NEWREG.  */

static int
cse_change_cc_mode (rtx *loc, void *data)
{
  struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;

  if (*loc
      && REG_P (*loc)
      && REGNO (*loc) == REGNO (args->newreg)
      && GET_MODE (*loc) != GET_MODE (args->newreg))
    {
      validate_change (args->insn, loc, args->newreg, 1);

      return -1;
    }
  return 0;
}

/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG) in INSN.  */

static void
cse_change_cc_mode_insn (rtx insn, rtx newreg)
{
  struct change_cc_mode_args args;
  int success;

  if (!INSN_P (insn))
    return;

  args.insn = insn;
  args.newreg = newreg;

  for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
  for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);

  /* If the following assertion is triggered, there is most probably
     something wrong with the cc_modes_compatible back end function.
     CC modes can only be considered compatible if the insn - with the mode
     replaced by any of the compatible modes - can still be recognized.  */
  success = apply_change_group ();
  gcc_assert (success);
}

/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
   any instruction which modifies NEWREG.  */

static void
cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
{
  rtx insn;

  for (insn = start; insn != end; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
	continue;

      if (reg_set_p (newreg, insn))
	return;

      cse_change_cc_mode_insn (insn, newreg);
    }
}

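/* For illustration (schematic; the register number and CC modes are
   i386-style but purely illustrative): if cse_cc_succs decides a
   comparison should live in CCZmode rather than CCmode, every reference
   such as

       (reg:CC 17 flags)

   in the following insns - up to END or until the register is set again -
   is rewritten to

       (reg:CCZ 17 flags)

   via validate_change, and apply_change_group must succeed because the
   target's cc_modes_compatible hook only reports a mode in which the
   affected insns can still be recognized.  */
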
/* BB is a basic block which finishes with CC_REG as a condition code
   register which is set to CC_SRC.  Look through the successors of BB
   to find blocks which have a single predecessor (i.e., this one),
   and look through those blocks for an assignment to CC_REG which is
   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
   permitted to change the mode of CC_SRC to a compatible mode.  This
   returns VOIDmode if no equivalent assignments were found.
   Otherwise it returns the mode which CC_SRC should wind up with.
   ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
   but is passed unmodified down to recursive calls in order to prevent
   endless recursion.

   The main complexity in this function is handling the mode issues.
   We may have more than one duplicate which we can eliminate, and we
   try to find a mode which will work for multiple duplicates.  */

static enum machine_mode
cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
	      bool can_change_mode)
{
  bool found_equiv;
  enum machine_mode mode;
  unsigned int insn_count;
  edge e;
  rtx insns[2];
  enum machine_mode modes[2];
  rtx last_insns[2];
  unsigned int i;
  rtx newreg;
  edge_iterator ei;

  /* We expect to have two successors.  Look at both before picking
     the final mode for the comparison.  If we have more successors
     (i.e., some sort of table jump, although that seems unlikely),
     then we require all beyond the first two to use the same
     mode.  */

  found_equiv = false;
  mode = GET_MODE (cc_src);
  insn_count = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      rtx insn;
      rtx end;

      if (e->flags & EDGE_COMPLEX)
	continue;

      if (EDGE_COUNT (e->dest->preds) != 1
	  || e->dest == EXIT_BLOCK_PTR
	  /* Avoid endless recursion on unreachable blocks.  */
	  || e->dest == orig_bb)
	continue;

      end = NEXT_INSN (BB_END (e->dest));
      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;

	  /* If CC_SRC is modified, we have to stop looking for
	     something which uses it.  */
	  if (modified_in_p (cc_src, insn))
	    break;

	  /* Check whether INSN sets CC_REG to CC_SRC.  */
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      bool found;
	      enum machine_mode set_mode;
	      enum machine_mode comp_mode;

	      found = false;
	      set_mode = GET_MODE (SET_SRC (set));
	      comp_mode = set_mode;
	      if (rtx_equal_p (cc_src, SET_SRC (set)))
		found = true;
	      else if (GET_CODE (cc_src) == COMPARE
		       && GET_CODE (SET_SRC (set)) == COMPARE
		       && mode != set_mode
		       && rtx_equal_p (XEXP (cc_src, 0),
				       XEXP (SET_SRC (set), 0))
		       && rtx_equal_p (XEXP (cc_src, 1),
				       XEXP (SET_SRC (set), 1)))
		{
		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
		  if (comp_mode != VOIDmode
		      && (can_change_mode || comp_mode == mode))
		    found = true;
		}

	      if (found)
		{
		  found_equiv = true;
		  if (insn_count < ARRAY_SIZE (insns))
		    {
		      insns[insn_count] = insn;
		      modes[insn_count] = set_mode;
		      last_insns[insn_count] = end;
		      ++insn_count;

		      if (mode != comp_mode)
			{
			  gcc_assert (can_change_mode);
			  mode = comp_mode;

			  /* The modified insn will be re-recognized later.  */
			  PUT_MODE (cc_src, mode);
			}
		    }
		  else
		    {
		      if (set_mode != mode)
			{
			  /* We found a matching expression in the
			     wrong mode, but we don't have room to
			     store it in the array.  Punt.  This case
			     should be rare.  */
			  break;
			}
		      /* INSN sets CC_REG to a value equal to CC_SRC
			 with the right mode.  We can simply delete
			 it.  */
		      delete_insn (insn);
		    }

		  /* We found an instruction to delete.  Keep looking,
		     in the hopes of finding a three-way jump.  */
		  continue;
		}

	      /* We found an instruction which sets the condition
		 code, so don't look any farther.  */
	      break;
	    }

	  /* If INSN sets CC_REG in some other way, don't look any
	     farther.  */
	  if (reg_set_p (cc_reg, insn))
	    break;
	}

      /* If we fell off the bottom of the block, we can keep looking
	 through successors.  We pass CAN_CHANGE_MODE as false because
	 we aren't prepared to handle compatibility between the
	 further blocks and this block.  */
      if (insn == end)
	{
	  enum machine_mode submode;

	  submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
	  if (submode != VOIDmode)
	    {
	      gcc_assert (submode == mode);
	      found_equiv = true;
	      can_change_mode = false;
	    }
	}
    }

  if (! found_equiv)
    return VOIDmode;

  /* Now INSN_COUNT is the number of instructions we found which set
     CC_REG to a value equivalent to CC_SRC.  The instructions are in
     INSNS.  The modes used by those instructions are in MODES.  */

  newreg = NULL_RTX;
  for (i = 0; i < insn_count; ++i)
    {
      if (modes[i] != mode)
	{
	  /* We need to change the mode of CC_REG in INSNS[i] and
	     subsequent instructions.  */
	  if (! newreg)
	    {
	      if (GET_MODE (cc_reg) == mode)
		newreg = cc_reg;
	      else
		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
	    }
	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
				    newreg);
	}

      delete_insn_and_edges (insns[i]);
    }

  return mode;
}

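/* For illustration (schematic; pseudos and CC modes are i386-style but
   purely illustrative), the typical shape handled above.  Block A ends
   with

       (set (reg:CCZ 17 flags) (compare:CCZ (reg:SI 100) (const_int 0)))
       (jump_insn ... conditional on (reg:CCZ 17 flags) ...)

   and its successor B, which has A as its only predecessor, recomputes the
   same comparison in a different CC mode:

       (set (reg:CC 17 flags) (compare:CC (reg:SI 100) (const_int 0)))

   The operands match, so cse_cc_succs asks targetm.cc_modes_compatible for
   a mode usable by both users, applies PUT_MODE to CC_SRC if that mode
   differs from its current one, deletes the duplicate set in B, and uses
   cse_change_cc_mode_insns to adjust any later references to the flags
   register in B to the chosen mode.  */
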
/* If we have a fixed condition code register (or two), walk through
   the instructions and try to eliminate duplicate assignments.  */

static void
cse_condition_code_reg (void)
{
  unsigned int cc_regno_1;
  unsigned int cc_regno_2;
  rtx cc_reg_1;
  rtx cc_reg_2;
  basic_block bb;

  if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
    return;

  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
  if (cc_regno_2 != INVALID_REGNUM)
    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
  else
    cc_reg_2 = NULL_RTX;

  FOR_EACH_BB (bb)
    {
      rtx last_insn;
      rtx cc_reg;
      rtx insn;
      rtx cc_src_insn;
      rtx cc_src;
      enum machine_mode mode;
      enum machine_mode orig_mode;

      /* Look for blocks which end with a conditional jump based on a
	 condition code register.  Then look for the instruction which
	 sets the condition code register.  Then look through the
	 successor blocks for instructions which set the condition
	 code register to the same value.  There are other possible
	 uses of the condition code register, but these are by far the
	 most common and the ones which we are most likely to be able
	 to optimize.  */

      last_insn = BB_END (bb);
      if (!JUMP_P (last_insn))
	continue;

      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
	cc_reg = cc_reg_1;
      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
	cc_reg = cc_reg_2;
      else
	continue;

      cc_src_insn = NULL_RTX;
      cc_src = NULL_RTX;
      for (insn = PREV_INSN (last_insn);
	   insn && insn != PREV_INSN (BB_HEAD (bb));
	   insn = PREV_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      cc_src_insn = insn;
	      cc_src = SET_SRC (set);
	      break;
	    }
	  else if (reg_set_p (cc_reg, insn))
	    break;
	}

      if (! cc_src_insn)
	continue;

      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
	continue;

      /* Now CC_REG is a condition code register used for a
	 conditional jump at the end of the block, and CC_SRC, in
	 CC_SRC_INSN, is the value to which that condition code
	 register is set, and CC_SRC is still meaningful at the end of
	 the basic block.  */

      orig_mode = GET_MODE (cc_src);
      mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
      if (mode != VOIDmode)
	{
	  gcc_assert (mode == GET_MODE (cc_src));
	  if (mode != orig_mode)
	    {
	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));

	      cse_change_cc_mode_insn (cc_src_insn, newreg);

	      /* Do the same in the following insns that use the
		 current value of CC_REG within BB.  */
	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
					NEXT_INSN (last_insn),
					newreg);
	    }
	}
    }
}

         | 7299 |  |  |  
 | 
      
         | 7300 |  |  | /* Perform common subexpression elimination.  Nonzero value from
 | 
      
         | 7301 |  |  |    `cse_main' means that jumps were simplified and some code may now
 | 
      
         | 7302 |  |  |    be unreachable, so do jump optimization again.  */
 | 
      
         | 7303 |  |  | static bool
 | 
      
         | 7304 |  |  | gate_handle_cse (void)
 | 
      
         | 7305 |  |  | {
 | 
      
         | 7306 |  |  |   return optimize > 0;
 | 
      
         | 7307 |  |  | }
 | 
      
         | 7308 |  |  |  
 | 
      
         | 7309 |  |  | static unsigned int
 | 
      
         | 7310 |  |  | rest_of_handle_cse (void)
 | 
      
         | 7311 |  |  | {
 | 
      
         | 7312 |  |  |   int tem;
 | 
      
         | 7313 |  |  |  
 | 
      
         | 7314 |  |  |   if (dump_file)
 | 
      
         | 7315 |  |  |     dump_flow_info (dump_file, dump_flags);
 | 
      
         | 7316 |  |  |  
 | 
      
         | 7317 |  |  |   tem = cse_main (get_insns (), max_reg_num ());
 | 
      
         | 7318 |  |  |  
 | 
      
         | 7319 |  |  |   /* If we are not running more CSE passes, then we are no longer
 | 
      
         | 7320 |  |  |      expecting CSE to be run.  But always rerun it in a cheap mode.  */
 | 
      
         | 7321 |  |  |   cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
 | 
      
         | 7322 |  |  |  
 | 
      
         | 7323 |  |  |   if (tem == 2)
 | 
      
         | 7324 |  |  |     {
 | 
      
         | 7325 |  |  |       timevar_push (TV_JUMP);
 | 
      
         | 7326 |  |  |       rebuild_jump_labels (get_insns ());
 | 
      
         | 7327 |  |  |       cleanup_cfg (0);
 | 
      
         | 7328 |  |  |       timevar_pop (TV_JUMP);
 | 
      
         | 7329 |  |  |     }
 | 
      
         | 7330 |  |  |   else if (tem == 1 || optimize > 1)
 | 
      
         | 7331 |  |  |     cleanup_cfg (0);
 | 
      
         | 7332 |  |  |  
 | 
      
         | 7333 |  |  |   return 0;
 | 
      
         | 7334 |  |  | }
 | 
      
         | 7335 |  |  |  
 | 
      
         | 7336 |  |  | struct rtl_opt_pass pass_cse =
 | 
      
         | 7337 |  |  | {
 | 
      
         | 7338 |  |  |  {
 | 
      
         | 7339 |  |  |   RTL_PASS,
 | 
      
         | 7340 |  |  |   "cse1",                               /* name */
 | 
      
         | 7341 |  |  |   gate_handle_cse,                      /* gate */
 | 
      
         | 7342 |  |  |   rest_of_handle_cse,                   /* execute */
 | 
      
         | 7343 |  |  |   NULL,                                 /* sub */
 | 
      
         | 7344 |  |  |   NULL,                                 /* next */
 | 
      
         | 7345 |  |  |   0,                                    /* static_pass_number */
 | 
      
         | 7346 |  |  |   TV_CSE,                               /* tv_id */
 | 
      
         | 7347 |  |  |   0,                                    /* properties_required */
 | 
      
         | 7348 |  |  |   0,                                    /* properties_provided */
 | 
      
         | 7349 |  |  |   0,                                    /* properties_destroyed */
 | 
      
         | 7350 |  |  |   0,                                    /* todo_flags_start */
 | 
      
         | 7351 |  |  |   TODO_df_finish | TODO_verify_rtl_sharing |
 | 
      
         | 7352 |  |  |   TODO_ggc_collect |
 | 
      
         | 7353 |  |  |   TODO_verify_flow,                     /* todo_flags_finish */
 | 
      
         | 7354 |  |  |  }
 | 
      
         | 7355 |  |  | };
 | 
      
         | 7356 |  |  |  
 | 
      
         | 7357 |  |  |  
 | 
      
         | 7358 |  |  | static bool
 | 
      
         | 7359 |  |  | gate_handle_cse2 (void)
 | 
      
         | 7360 |  |  | {
 | 
      
         | 7361 |  |  |   return optimize > 0 && flag_rerun_cse_after_loop;
 | 
      
         | 7362 |  |  | }

/* Run second CSE pass after loop optimizations.  */
static unsigned int
rest_of_handle_cse2 (void)
{
  int tem;

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  tem = cse_main (get_insns (), max_reg_num ());

  /* Run a pass to eliminate duplicated assignments to condition code
     registers.  We have to run this after bypass_jumps, because
     eliminating those duplicated assignments makes it harder for that
     pass to determine whether a jump can be bypassed safely.  */
  cse_condition_code_reg ();

  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (0);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1)
    cleanup_cfg (0);

  cse_not_expected = 1;
  return 0;
}
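
/* Illustration of the redundancy that cse_condition_code_reg targets
   (hypothetical RTL; the flags register and CCZ mode are shown only as
   an i386-style example):

       (set (reg:CCZ flags) (compare:CCZ (reg:SI 60) (const_int 0)))
       ... insns that change neither pseudo 60 nor the flags ...
       (set (reg:CCZ flags) (compare:CCZ (reg:SI 60) (const_int 0)))

   The second set recomputes a condition code value that is already
   available and can therefore be removed.  Deleting such duplicates
   before jump bypassing would hide information that pass uses to
   decide whether a conditional jump can be bypassed, hence the
   ordering constraint noted above.  */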


struct rtl_opt_pass pass_cse2 =
{
 {
  RTL_PASS,
  "cse2",                               /* name */
  gate_handle_cse2,                     /* gate */
  rest_of_handle_cse2,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CSE2,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect |
  TODO_verify_flow                      /* todo_flags_finish */
 }
};

static bool
gate_handle_cse_after_global_opts (void)
{
  return optimize > 0 && flag_rerun_cse_after_global_opts;
}

/* Run a local CSE pass (one that does not follow jumps) after the
   global RTL optimizations.  */
static unsigned int
rest_of_handle_cse_after_global_opts (void)
{
  int save_cfj;
  int tem;

  /* We only want to do local CSE, so don't follow jumps.  */
  save_cfj = flag_cse_follow_jumps;
  flag_cse_follow_jumps = 0;

  rebuild_jump_labels (get_insns ());
  tem = cse_main (get_insns (), max_reg_num ());
  purge_all_dead_edges ();
  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  cse_not_expected = !flag_rerun_cse_after_loop;

  /* If cse altered any jumps, rerun jump opts to clean things up.  */
  if (tem == 2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (0);
      timevar_pop (TV_JUMP);
    }
  else if (tem == 1)
    cleanup_cfg (0);

  flag_cse_follow_jumps = save_cfj;
  return 0;
}
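
/* Note on the flag juggling above: -fcse-follow-jumps lets CSE scan
   through a conditional jump when the jump target is reached from no
   other path, so clearing flag_cse_follow_jumps for the duration of
   this pass keeps cse_main strictly local, as the comment in the
   function says.  The user's original setting is restored on exit so
   that any later CSE run still honors the command-line option.  */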

struct rtl_opt_pass pass_cse_after_global_opts =
{
 {
  RTL_PASS,
  "cse_local",                          /* name */
  gate_handle_cse_after_global_opts,    /* gate */
  rest_of_handle_cse_after_global_opts, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CSE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect |
  TODO_verify_flow                      /* todo_flags_finish */
 }
};
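
/* The name strings in the descriptors above ("cse1", "cse2",
   "cse_local") also identify each pass's RTL dump, requested with
   -fdump-rtl-<name>, e.g. -fdump-rtl-cse1 for the first CSE pass.  */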