/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"


/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}
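
/* Illustrative sketch, not part of the pass: for a GIMPLE assignment
   like "x = a + b", gimple_expr_code yields PLUS_EXPR, which is of
   class GIMPLE_BINARY_RHS, so the calls above reduce to roughly

     t = build2 (PLUS_EXPR, TREE_TYPE (x), a, b);

   where x, a and b stand in for the operand trees of the statement.  */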


#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name, this is exactly SET_DECL_RTL; otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
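
/* A minimal sketch of the pc_rtx convention above (v, r1 and r2 are
   hypothetical): if two SSA partitions of the same base variable v
   receive different homes, the recorded location degrades to the
   "multiple places" sentinel:

     set_rtl (v_1, r1);    => DECL_RTL (v) becomes r1
     set_rtl (v_2, r2);    => r2 != r1, so DECL_RTL (v) becomes pc_rtx

   after which debug info stops claiming a single location for v.  */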

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The indices of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't satisfy given the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < align)
        {
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
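
/* Worked example of the downward-growing case (the numbers are an
   illustrative assumption): with frame_phase == 0 and frame_offset
   == -20, a request for size 12 at align 8 computes

     new_frame_offset = -20 - 12 = -32;
     new_frame_offset &= -8;            (still -32, already aligned)
     offset = -32;

   so the slot lands at frame offset -32, which also becomes the new
   frame_offset.  */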

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* All variables initially conflict with no other.  */
  stack_vars[stack_vars_num].conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with indices X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with indices X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}
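
/* Illustrative example (the type is hypothetical): for

     struct s { int i; union { short h; int j; } u; };

   aggregate_contains_union_type returns true, since the walk over the
   FIELD_DECLs of the RECORD_TYPE reaches the union; it also returns
   true for an array of struct s, via the ARRAY_TYPE recursion.  */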

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then we do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union-containing structures.  Otherwise RTL alias analysis comes
   along and, due to type-based aliasing rules, decides that for two
   overlapping union temporaries { short s; int i; } accesses to the same
   mem through different types may not alias and happily reorders stores
   across life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size and type of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  tree decla, declb;
  unsigned int uida, uidb;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  decla = stack_vars[*(const size_t *)a].decl;
  declb = stack_vars[*(const size_t *)b].decl;
  /* For stack variables of the same size use an id of the decls
     to make the sort stable.  Two SSA names are compared by their
     version, SSA names come before non-SSA names, and two normal
     decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}
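
/* Example ordering under assumed sizes and ids: for stack vars
   A (plain decl, size 4, DECL_UID 10), B (SSA name, size 4, version 3)
   and C (size 8), qsort with stack_var_size_cmp yields B, A, C:
   sizes ascend first, and among equal sizes SSA names sort before
   ordinary decls.  */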


/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced.  */
          gcc_assert (DECL_P (decl)
                      && referenced_var_lookup (uid));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);
      add_partitioned_vars_to_ptset (&cfun->gimple_df->callused,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block easy to lay out within
   the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
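
/* Small worked instance of the splice above (indices are
   hypothetical): with A = {3 -> 7 -> EOC} and B = {5 -> EOC},
   union_stack_vars (3, 5, off) sets stack_vars[5].next = 7 and
   stack_vars[3].next = 5, giving {3 -> 5 -> 7 -> EOC}; 5's offset is
   shifted by OFF and its representative becomes 3.  */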

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/
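
/* A worked instance of the loop above (sizes and conflicts are
   assumptions for exposition): let A have size 16, and let B (size 8)
   and C (size 4) both be non-conflicting with A.  Starting from
   S = 16, O = 0: B is unioned in at offset 0 (O = 8, S = 8), then C
   at offset 8 (O = 12, S = 4), after which nothing fits.  B and C end
   up in disjoint bytes of A's 16-byte slot; sharing A's bytes at all
   is safe because their lifetimes do not overlap A's.  */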

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;
        }
    }

  if (optimize)
    update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set the alignment we actually gave this decl if it isn't an SSA
         name.  If it is, we generate stack slots only accidentally, so
         it isn't as important; we'll simply use the alignment that is
         already set.  */
      offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0)
        align = STACK_BOUNDARY;
      else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = MAX_SUPPORTED_STACK_ALIGNMENT;

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
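
/* Note on "align = offset & -offset" above: in two's complement this
   isolates the lowest set bit, i.e. the largest power of two dividing
   offset.  For example (numbers illustrative), offset 24 gives
   24 & -24 == 8, so a variable at frame offset 24 is known to be
   8-byte aligned before scaling by BITS_PER_UNIT.  */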
 | 
      
         | 737 |  |  |  
 | 
      
         | 738 |  |  | /* A subroutine of expand_used_vars.  Give each partition representative
 | 
      
         | 739 |  |  |    a unique location within the stack frame.  Update each partition member
 | 
      
         | 740 |  |  |    with that location.  */
 | 
      
         | 741 |  |  |  
 | 
      
         | 742 |  |  | static void
 | 
      
         | 743 |  |  | expand_stack_vars (bool (*pred) (tree))
 | 
      
         | 744 |  |  | {
 | 
      
         | 745 |  |  |   size_t si, i, j, n = stack_vars_num;
 | 
      
         | 746 |  |  |  
 | 
      
         | 747 |  |  |   for (si = 0; si < n; ++si)
 | 
      
         | 748 |  |  |     {
 | 
      
         | 749 |  |  |       HOST_WIDE_INT offset;
 | 
      
         | 750 |  |  |  
 | 
      
         | 751 |  |  |       i = stack_vars_sorted[si];
 | 
      
         | 752 |  |  |  
 | 
      
         | 753 |  |  |       /* Skip variables that aren't partition representatives, for now.  */
 | 
      
         | 754 |  |  |       if (stack_vars[i].representative != i)
 | 
      
         | 755 |  |  |         continue;
 | 
      
         | 756 |  |  |  
 | 
      
         | 757 |  |  |       /* Skip variables that have already had rtl assigned.  See also
 | 
      
         | 758 |  |  |          add_stack_var where we perpetrate this pc_rtx hack.  */
 | 
      
         | 759 |  |  |       if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
 | 
      
         | 760 |  |  |            ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
 | 
      
         | 761 |  |  |            : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
 | 
      
         | 762 |  |  |         continue;
 | 
      
         | 763 |  |  |  
 | 
      
         | 764 |  |  |       /* Check the predicate to see whether this variable should be
 | 
      
         | 765 |  |  |          allocated in this pass.  */
 | 
      
         | 766 |  |  |       if (pred && !pred (stack_vars[i].decl))
 | 
      
         | 767 |  |  |         continue;
 | 
      
         | 768 |  |  |  
 | 
      
         | 769 |  |  |       offset = alloc_stack_frame_space (stack_vars[i].size,
 | 
      
         | 770 |  |  |                                         stack_vars[i].alignb);
 | 
      
         | 771 |  |  |  
 | 
      
         | 772 |  |  |       /* Create rtl for each variable based on their location within the
 | 
      
         | 773 |  |  |          partition.  */
 | 
      
         | 774 |  |  |       for (j = i; j != EOC; j = stack_vars[j].next)
 | 
      
         | 775 |  |  |         {
 | 
      
         | 776 |  |  |           gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
 | 
      
         | 777 |  |  |           expand_one_stack_var_at (stack_vars[j].decl,
 | 
      
         | 778 |  |  |                                    stack_vars[j].offset + offset);
 | 
      
         | 779 |  |  |         }
 | 
      
         | 780 |  |  |     }
 | 
      
         | 781 |  |  | }
 | 
      
         | 782 |  |  |  
 | 
      
         | 783 |  |  | /* Take into account all sizes of partitions and reset DECL_RTLs.  */
 | 
      
         | 784 |  |  | static HOST_WIDE_INT
 | 
      
         | 785 |  |  | account_stack_vars (void)
 | 
      
         | 786 |  |  | {
 | 
      
         | 787 |  |  |   size_t si, j, i, n = stack_vars_num;
 | 
      
         | 788 |  |  |   HOST_WIDE_INT size = 0;
 | 
      
         | 789 |  |  |  
 | 
      
         | 790 |  |  |   for (si = 0; si < n; ++si)
 | 
      
         | 791 |  |  |     {
 | 
      
         | 792 |  |  |       i = stack_vars_sorted[si];
 | 
      
         | 793 |  |  |  
 | 
      
         | 794 |  |  |       /* Skip variables that aren't partition representatives, for now.  */
 | 
      
         | 795 |  |  |       if (stack_vars[i].representative != i)
 | 
      
         | 796 |  |  |         continue;
 | 
      
         | 797 |  |  |  
 | 
      
         | 798 |  |  |       size += stack_vars[i].size;
 | 
      
         | 799 |  |  |       for (j = i; j != EOC; j = stack_vars[j].next)
 | 
      
         | 800 |  |  |         set_rtl (stack_vars[j].decl, NULL);
 | 
      
         | 801 |  |  |     }
 | 
      
         | 802 |  |  |   return size;
 | 
      
         | 803 |  |  | }
 | 
      
         | 804 |  |  |  
 | 
      
         | 805 |  |  | /* A subroutine of expand_one_var.  Called to immediately assign rtl
 | 
      
         | 806 |  |  |    to a variable to be allocated in the stack frame.  */
 | 
      
         | 807 |  |  |  
 | 
      
         | 808 |  |  | static void
 | 
      
         | 809 |  |  | expand_one_stack_var (tree var)
 | 
      
         | 810 |  |  | {
 | 
      
         | 811 |  |  |   HOST_WIDE_INT size, offset, align;
 | 
      
         | 812 |  |  |  
 | 
      
         | 813 |  |  |   size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
 | 
      
         | 814 |  |  |   align = get_decl_align_unit (SSAVAR (var));
 | 
      
         | 815 |  |  |   offset = alloc_stack_frame_space (size, align);
 | 
      
         | 816 |  |  |  
 | 
      
         | 817 |  |  |   expand_one_stack_var_at (var, offset);
 | 
      
         | 818 |  |  | }
 | 
      
         | 819 |  |  |  
 | 
      
         | 820 |  |  | /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
 | 
      
         | 821 |  |  |    that will reside in a hard register.  */
 | 
      
         | 822 |  |  |  
 | 
      
         | 823 |  |  | static void
 | 
      
         | 824 |  |  | expand_one_hard_reg_var (tree var)
 | 
      
         | 825 |  |  | {
 | 
      
         | 826 |  |  |   rest_of_decl_compilation (var, 0, 0);
 | 
      
         | 827 |  |  | }
 | 
      
         | 828 |  |  |  
 | 
      
         | 829 |  |  | /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
 | 
      
         | 830 |  |  |    that will reside in a pseudo register.  */
 | 
      
         | 831 |  |  |  
 | 
      
         | 832 |  |  | static void
 | 
      
         | 833 |  |  | expand_one_register_var (tree var)
 | 
      
         | 834 |  |  | {
 | 
      
         | 835 |  |  |   tree decl = SSAVAR (var);
 | 
      
         | 836 |  |  |   tree type = TREE_TYPE (decl);
 | 
      
         | 837 |  |  |   enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
 | 
      
         | 838 |  |  |   rtx x = gen_reg_rtx (reg_mode);
 | 
      
         | 839 |  |  |  
 | 
      
         | 840 |  |  |   set_rtl (var, x);
 | 
      
         | 841 |  |  |  
 | 
      
         | 842 |  |  |   /* Note if the object is a user variable.  */
 | 
      
         | 843 |  |  |   if (!DECL_ARTIFICIAL (decl))
 | 
      
         | 844 |  |  |     mark_user_reg (x);
 | 
      
         | 845 |  |  |  
 | 
      
         | 846 |  |  |   if (POINTER_TYPE_P (type))
 | 
      
         | 847 |  |  |     mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
 | 
      
         | 848 |  |  | }
 | 
      
         | 849 |  |  |  
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

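/* A worked illustration (example declarations assumed for exposition):
   at -O0 with no stack protector, variables in the outermost scope are
   never deferred; within a nested scope, "int i;" (4 bytes, under the
   32-byte cutoff above) is still allocated immediately, while
   "char buf[64]" is deferred and becomes a candidate for slot sharing.
   With -fstack-protector both are deferred, since the whole frame must
   be reordered.  */
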
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the amount of stack space this variable is expected to
   take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  tree origvar = var;
  var = SSAVAR (var);

  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)
    {
      unsigned int align;

      /* Because we don't know whether VAR will end up in a register or
         on the stack, we conservatively assume it will be on the stack
         even if VAR is eventually put into a register after the RA pass.
         For non-automatic variables, which won't be on the stack, we
         collect the alignment of the type and ignore any user-specified
         alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      if (crtl->stack_alignment_estimated < align)
        {
          /* stack_alignment_estimated shouldn't change after the stack
             realignment decision has been made.  */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

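/* A sketch of the dispatch above on example declarations (illustrative
   assumptions, not from the sources): "extern int e;" and "static int s;"
   fall out through the early cases and get no frame space; a declaration
   like "register int h asm ("ebx");" (target-specific example) reaches
   expand_one_hard_reg_var; a scalar "int i;" that use_register_for_decl
   accepts becomes a pseudo; and "char buf[64]" is either deferred into
   STACK_VARS or, failing that, expanded to a frame slot immediately.  */
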
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at contained (sub-block) levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

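/* To illustrate the conflict mirroring (an example, not from the
   sources): in

       { char a[100];
         { char b[100]; ... }
         { char c[100]; ... }
       }

   A conflicts with both B and C, because their scopes are nested inside
   A's, but B and C sit in sibling scopes, never conflict, and may later
   be assigned the same 100-byte stack slot.  */
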
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY       1
#define SPCT_HAS_SMALL_CHAR_ARRAY       2
#define SPCT_HAS_ARRAY                  4
#define SPCT_HAS_AGGREGATE              8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

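/* Example classifications, assuming the default --param ssp-buffer-size=8
   (the parameter value is a configurable assumption):

       char buf[4]             -> SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY
       char buf[64]            -> SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY
       int arr[10]             -> SPCT_HAS_ARRAY
       struct { char b[64]; }  -> SPCT_HAS_AGGREGATE
                                  | SPCT_HAS_LARGE_CHAR_ARRAY
                                  | SPCT_HAS_ARRAY  */
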
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

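/* Worked examples of the phase assignment (illustrative, assuming the
   default ssp-buffer-size): "char buf[64]" is phase 1 under both
   -fstack-protector and -fstack-protector-all; "int arr[10]" is phase 0
   (unprotected) under -fstack-protector but phase 2 under
   -fstack-protector-all; and a struct containing a large char array has
   SPCT_HAS_AGGREGATE set, so -fstack-protector-all places it in phase 2
   rather than phase 1.  */
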
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and account for the stack space that used variables would consume,
   without actually expanding any of them.  This parallels
   expand_used_vars_for_block and is only used for the frame size
   estimate below.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  tree t;
  HOST_WIDE_INT size = 0;

  /* Account for all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  /* Account for all variables at contained (sub-block) levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  return size;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}

/* Make a fair guess for the size of the stack frame of the current
   function.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }

  return size;
}

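/* A hypothetical data point on the estimate's bias: three 100-byte
   arrays in sibling nested scopes contribute 300 bytes here, although a
   really packed frame could share one slot between the two sibling-scope
   arrays and come out nearer 200 bytes.  So the estimate errs on the
   large side precisely when packing would have succeeded, which, as the
   comment above notes, is uncommon.  */
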
/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, next, outer_block = DECL_INITIAL (current_function_decl);
  tree maybe_local_decls = NULL_TREE;
  unsigned i;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

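  /* Worked numbers (illustrative assumptions, not facts about any
     particular target): with a preferred stack boundary of 128 bits,
     align is 16; a STARTING_FRAME_OFFSET of 4 gives off == 4 and
     frame_phase == 12, i.e. frame offsets congruent to 12 mod 16 are
     the ones that end up 16-byte aligned.  */
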
  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  t = cfun->local_decls;
  cfun->local_decls = NULL_TREE;
  for (; t; t = next)
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      next = TREE_CHAIN (t);

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            {
              TREE_CHAIN (t) = cfun->local_decls;
              cfun->local_decls = t;
              continue;
            }
          else if (rtl == NULL_RTX)
            {
              /* If rtl isn't set yet, which can happen e.g. with
                 -fstack-protector, retry before returning from this
                 function.  */
              TREE_CHAIN (t) = maybe_local_decls;
              maybe_local_decls = t;
              continue;
            }
        }

      ggc_free (t);
    }

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  for (t = maybe_local_decls; t; t = next)
    {
      tree var = TREE_VALUE (t);
      rtx rtl = DECL_RTL_IF_SET (var);

      next = TREE_CHAIN (t);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        {
          TREE_CHAIN (t) = cfun->local_decls;
          cfun->local_decls = t;
          continue;
        }

      ggc_free (t);
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
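  /* For the record (a worked example, assuming align == 16): the
     adjustment above turns a frame_offset of 23 into (23 + 15) & -16
     == 32 for an upward-growing frame, and rounds -23 & -16 down to
     -32 for a downward-growing one; either way the result is a
     multiple of 16.  */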
 | 
      
         | 1474 |  |  | }
 | 
      
         | 1475 |  |  |  
 | 
      
         | 1476 |  |  |  
 | 
      
         | 1477 |  |  | /* If we need to produce a detailed dump, print the tree representation
 | 
      
         | 1478 |  |  |    for STMT to the dump file.  SINCE is the last RTX after which the RTL
 | 
      
         | 1479 |  |  |    generated for STMT should have been appended.  */
 | 
      
         | 1480 |  |  |  
 | 
      
         | 1481 |  |  | static void
 | 
      
         | 1482 |  |  | maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
 | 
      
         | 1483 |  |  | {
 | 
      
         | 1484 |  |  |   if (dump_file && (dump_flags & TDF_DETAILS))
 | 
      
         | 1485 |  |  |     {
 | 
      
         | 1486 |  |  |       fprintf (dump_file, "\n;; ");
 | 
      
         | 1487 |  |  |       print_gimple_stmt (dump_file, stmt, 0,
 | 
      
         | 1488 |  |  |                          TDF_SLIM | (dump_flags & TDF_LINENO));
 | 
      
         | 1489 |  |  |       fprintf (dump_file, "\n");
 | 
      
         | 1490 |  |  |  
 | 
      
         | 1491 |  |  |       print_rtl (dump_file, since ? NEXT_INSN (since) : since);
 | 
      
         | 1492 |  |  |     }
 | 
      
         | 1493 |  |  | }
 | 
      
         | 1494 |  |  |  
 | 
      
         | 1495 |  |  | /* Maps the blocks that do not contain tree labels to rtx labels.  */
 | 
      
         | 1496 |  |  |  
 | 
      
         | 1497 |  |  | static struct pointer_map_t *lab_rtx_for_bb;
 | 
      
         | 1498 |  |  |  
 | 
      
         | 1499 |  |  | /* Returns the label_rtx expression for a label starting basic block BB.  */
 | 
      
         | 1500 |  |  |  
 | 
      
         | 1501 |  |  | static rtx
 | 
      
         | 1502 |  |  | label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
 | 
      
         | 1503 |  |  | {
 | 
      
         | 1504 |  |  |   gimple_stmt_iterator gsi;
 | 
      
         | 1505 |  |  |   tree lab;
 | 
      
         | 1506 |  |  |   gimple lab_stmt;
 | 
      
         | 1507 |  |  |   void **elt;
 | 
      
         | 1508 |  |  |  
 | 
      
         | 1509 |  |  |   if (bb->flags & BB_RTL)
 | 
      
         | 1510 |  |  |     return block_label (bb);
 | 
      
         | 1511 |  |  |  
 | 
      
         | 1512 |  |  |   elt = pointer_map_contains (lab_rtx_for_bb, bb);
 | 
      
         | 1513 |  |  |   if (elt)
 | 
      
         | 1514 |  |  |     return (rtx) *elt;
 | 
      
         | 1515 |  |  |  
 | 
      
         | 1516 |  |  |   /* Find the tree label if it is present.  */
 | 
      
         | 1517 |  |  |  
 | 
      
         | 1518 |  |  |   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
 | 
      
         | 1519 |  |  |     {
 | 
      
         | 1520 |  |  |       lab_stmt = gsi_stmt (gsi);
 | 
      
         | 1521 |  |  |       if (gimple_code (lab_stmt) != GIMPLE_LABEL)
 | 
      
         | 1522 |  |  |         break;
 | 
      
         | 1523 |  |  |  
 | 
      
         | 1524 |  |  |       lab = gimple_label_label (lab_stmt);
 | 
      
         | 1525 |  |  |       if (DECL_NONLOCAL (lab))
 | 
      
         | 1526 |  |  |         break;
 | 
      
         | 1527 |  |  |  
 | 
      
         | 1528 |  |  |       return label_rtx (lab);
 | 
      
         | 1529 |  |  |     }
 | 
      
         | 1530 |  |  |  
 | 
      
         | 1531 |  |  |   elt = pointer_map_insert (lab_rtx_for_bb, bb);
 | 
      
         | 1532 |  |  |   *elt = gen_label_rtx ();
 | 
      
         | 1533 |  |  |   return (rtx) *elt;
 | 
      
         | 1534 |  |  | }
 | 
      
         | 1535 |  |  |  
 | 
      
         | 1536 |  |  |  
 | 
      
         | 1537 |  |  | /* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
 | 
      
         | 1538 |  |  |    of a basic block where we just expanded the conditional at the end,
 | 
      
         | 1539 |  |  |    possibly clean up the CFG and instruction sequence.  LAST is the
 | 
      
         | 1540 |  |  |    last instruction before the just emitted jump sequence.  */
 | 
      
         | 1541 |  |  |  
 | 
      
         | 1542 |  |  | static void
 | 
      
         | 1543 |  |  | maybe_cleanup_end_of_block (edge e, rtx last)
 | 
      
         | 1544 |  |  | {
 | 
      
         | 1545 |  |  |   /* Special case: when jumpif decides that the condition is
 | 
      
         | 1546 |  |  |      trivial it emits an unconditional jump (and the necessary
 | 
      
         | 1547 |  |  |      barrier).  But we still have two edges, the fallthru one is
 | 
      
         | 1548 |  |  |      wrong.  purge_dead_edges would clean this up later.  Unfortunately
 | 
      
         | 1549 |  |  |      we have to insert insns (and split edges) before
 | 
      
         | 1550 |  |  |      find_many_sub_basic_blocks and hence before purge_dead_edges.
 | 
      
         | 1551 |  |  |      But splitting edges might create new blocks which depend on the
 | 
      
         | 1552 |  |  |      fact that if there are two edges there's no barrier.  So the
 | 
      
         | 1553 |  |  |      barrier would get lost and verify_flow_info would ICE.  Instead
 | 
      
         | 1554 |  |  |      of auditing all edge splitters to care for the barrier (which
 | 
      
         | 1555 |  |  |      normally isn't there in a cleaned CFG), fix it here.  */
 | 
      
         | 1556 |  |  |   if (BARRIER_P (get_last_insn ()))
 | 
      
         | 1557 |  |  |     {
 | 
      
         | 1558 |  |  |       rtx insn;
 | 
      
         | 1559 |  |  |       remove_edge (e);
 | 
      
         | 1560 |  |  |       /* Now, we have a single successor block, if we have insns to
 | 
      
         | 1561 |  |  |          insert on the remaining edge we potentially will insert
 | 
      
         | 1562 |  |  |          it at the end of this block (if the dest block isn't feasible)
 | 
      
         | 1563 |  |  |          in order to avoid splitting the edge.  This insertion will take
 | 
      
         | 1564 |  |  |          place in front of the last jump.  But we might have emitted
 | 
      
         | 1565 |  |  |          multiple jumps (conditional and one unconditional) to the
 | 
      
         | 1566 |  |  |          same destination.  Inserting in front of the last one then
 | 
      
         | 1567 |  |  |          is a problem.  See PR 40021.  We fix this by deleting all
 | 
      
         | 1568 |  |  |          jumps except the last unconditional one.  */
 | 
      
         | 1569 |  |  |       insn = PREV_INSN (get_last_insn ());
 | 
      
         | 1570 |  |  |       /* Make sure we have an unconditional jump.  Otherwise we're
 | 
      
         | 1571 |  |  |          confused.  */
 | 
      
         | 1572 |  |  |       gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
 | 
      
         | 1573 |  |  |       for (insn = PREV_INSN (insn); insn != last;)
 | 
      
         | 1574 |  |  |         {
 | 
      
         | 1575 |  |  |           insn = PREV_INSN (insn);
 | 
      
         | 1576 |  |  |           if (JUMP_P (NEXT_INSN (insn)))
 | 
      
         | 1577 |  |  |             delete_insn (NEXT_INSN (insn));
 | 
      
         | 1578 |  |  |         }
 | 
      
         | 1579 |  |  |     }
 | 
      
         | 1580 |  |  | }
 | 
      
         | 1581 |  |  |  
 | 
      
         | 1582 |  |  | /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
 | 
      
         | 1583 |  |  |    Returns a new basic block if we've terminated the current basic
 | 
      
         | 1584 |  |  |    block and created a new one.  */
 | 
      
         | 1585 |  |  |  
 | 
      
         | 1586 |  |  | static basic_block
 | 
      
         | 1587 |  |  | expand_gimple_cond (basic_block bb, gimple stmt)
 | 
      
         | 1588 |  |  | {
 | 
      
         | 1589 |  |  |   basic_block new_bb, dest;
 | 
      
         | 1590 |  |  |   edge new_edge;
 | 
      
         | 1591 |  |  |   edge true_edge;
 | 
      
         | 1592 |  |  |   edge false_edge;
 | 
      
         | 1593 |  |  |   rtx last2, last;
 | 
      
         | 1594 |  |  |   enum tree_code code;
 | 
      
         | 1595 |  |  |   tree op0, op1;
 | 
      
         | 1596 |  |  |  
 | 
      
         | 1597 |  |  |   code = gimple_cond_code (stmt);
 | 
      
         | 1598 |  |  |   op0 = gimple_cond_lhs (stmt);
 | 
      
         | 1599 |  |  |   op1 = gimple_cond_rhs (stmt);
 | 
      
         | 1600 |  |  |   /* We're sometimes presented with such code:
 | 
      
         | 1601 |  |  |        D.123_1 = x < y;
 | 
      
         | 1602 |  |  |        if (D.123_1 != 0)
 | 
      
         | 1603 |  |  |          ...
 | 
      
         | 1604 |  |  |      This would expand to two comparisons which then later might
 | 
      
         | 1605 |  |  |      be cleaned up by combine.  But some pattern matchers like if-conversion
 | 
      
         | 1606 |  |  |      work better when there's only one compare, so make up for this
 | 
      
         | 1607 |  |  |      here as special exception if TER would have made the same change.  */
 | 
      
         | 1608 |  |  |   if (gimple_cond_single_var_p (stmt)
 | 
      
         | 1609 |  |  |       && SA.values
 | 
      
         | 1610 |  |  |       && TREE_CODE (op0) == SSA_NAME
 | 
      
         | 1611 |  |  |       && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
 | 
      
         | 1612 |  |  |     {
 | 
      
         | 1613 |  |  |       gimple second = SSA_NAME_DEF_STMT (op0);
 | 
      
         | 1614 |  |  |       if (gimple_code (second) == GIMPLE_ASSIGN)
 | 
      
         | 1615 |  |  |         {
 | 
      
         | 1616 |  |  |           enum tree_code code2 = gimple_assign_rhs_code (second);
 | 
      
         | 1617 |  |  |           if (TREE_CODE_CLASS (code2) == tcc_comparison)
 | 
      
         | 1618 |  |  |             {
 | 
      
         | 1619 |  |  |               code = code2;
 | 
      
         | 1620 |  |  |               op0 = gimple_assign_rhs1 (second);
 | 
      
         | 1621 |  |  |               op1 = gimple_assign_rhs2 (second);
 | 
      
         | 1622 |  |  |             }
 | 
      
         | 1623 |  |  |           /* If jumps are cheap turn some more codes into
 | 
      
         | 1624 |  |  |              jumpy sequences.  */
 | 
      
         | 1625 |  |  |           else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
 | 
      
         | 1626 |  |  |             {
 | 
      
         | 1627 |  |  |               if ((code2 == BIT_AND_EXPR
 | 
      
         | 1628 |  |  |                    && TYPE_PRECISION (TREE_TYPE (op0)) == 1
 | 
      
         | 1629 |  |  |                    && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
 | 
      
         | 1630 |  |  |                   || code2 == TRUTH_AND_EXPR)
 | 
      
         | 1631 |  |  |                 {
 | 
      
         | 1632 |  |  |                   code = TRUTH_ANDIF_EXPR;
 | 
      
         | 1633 |  |  |                   op0 = gimple_assign_rhs1 (second);
 | 
      
         | 1634 |  |  |                   op1 = gimple_assign_rhs2 (second);
 | 
      
         | 1635 |  |  |                 }
 | 
      
         | 1636 |  |  |               else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
 | 
      
         | 1637 |  |  |                 {
 | 
      
         | 1638 |  |  |                   code = TRUTH_ORIF_EXPR;
 | 
      
         | 1639 |  |  |                   op0 = gimple_assign_rhs1 (second);
 | 
      
         | 1640 |  |  |                   op1 = gimple_assign_rhs2 (second);
 | 
      
         | 1641 |  |  |                 }
 | 
      
         | 1642 |  |  |             }
 | 
      
         | 1643 |  |  |         }
 | 
      
         | 1644 |  |  |     }
 | 
      
         | 1645 |  |  |  
 | 
      
         | 1646 |  |  |   last2 = last = get_last_insn ();
 | 
      
         | 1647 |  |  |  
 | 
      
         | 1648 |  |  |   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
 | 
      
         | 1649 |  |  |   if (gimple_has_location (stmt))
 | 
      
         | 1650 |  |  |     {
 | 
      
         | 1651 |  |  |       set_curr_insn_source_location (gimple_location (stmt));
 | 
      
         | 1652 |  |  |       set_curr_insn_block (gimple_block (stmt));
 | 
      
         | 1653 |  |  |     }
 | 
      
         | 1654 |  |  |  
 | 
      
         | 1655 |  |  |   /* These flags have no purpose in RTL land.  */
 | 
      
         | 1656 |  |  |   true_edge->flags &= ~EDGE_TRUE_VALUE;
 | 
      
         | 1657 |  |  |   false_edge->flags &= ~EDGE_FALSE_VALUE;
 | 
      
         | 1658 |  |  |  
 | 
      
         | 1659 |  |  |   /* We can either have a pure conditional jump with one fallthru edge or
 | 
      
         | 1660 |  |  |      two-way jump that needs to be decomposed into two basic blocks.  */
 | 
      
         | 1661 |  |  |   if (false_edge->dest == bb->next_bb)
 | 
      
         | 1662 |  |  |     {
 | 
      
         | 1663 |  |  |       jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
 | 
      
         | 1664 |  |  |                 true_edge->probability);
 | 
      
         | 1665 |  |  |       maybe_dump_rtl_for_gimple_stmt (stmt, last);
 | 
      
         | 1666 |  |  |       if (true_edge->goto_locus)
 | 
      
         | 1667 |  |  |         {
 | 
      
         | 1668 |  |  |           set_curr_insn_source_location (true_edge->goto_locus);
 | 
      
         | 1669 |  |  |           set_curr_insn_block (true_edge->goto_block);
 | 
      
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}

/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gimple stmt)
{
  tree exp;
  tree lhs = gimple_call_lhs (stmt);
  size_t i;
  bool builtin_p;
  tree decl;

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  decl = gimple_call_fndecl (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple def;
      /* TER (temporary expression replacement) substitutes address
         expressions into arguments of builtin functions, so that we have
         a chance to infer more correct alignment information.
         See PR39954.  */
      if (builtin_p
          && TREE_CODE (arg) == SSA_NAME
          && (def = get_gimple_for_ssa_name (arg))
          && gimple_assign_rhs_code (def) == ADDR_EXPR)
        arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
  TREE_BLOCK (exp) = gimple_block (stmt);

  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
}
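
/* Illustrative example (editorial annotation, not from the original
   sources; BUF and SRC are hypothetical names): given GIMPLE such as

       ptr_1 = &buf[0];
       __builtin_memcpy (ptr_1, src_2, 8);

   substituting the ADDR_EXPR &buf[0] for ptr_1 in the argument list
   lets the memcpy expander see the declared alignment of BUF instead
   of an opaque SSA name.  */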

/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is, no tail calls and no GIMPLE_COND.  */

static void
expand_gimple_stmt_1 (gimple stmt)
{
  tree op0;
  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)
        expand_goto (op0);
      else
        expand_computed_goto (op0);
      break;
    case GIMPLE_LABEL:
      expand_label (gimple_label_label (stmt));
      break;
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;
    case GIMPLE_SWITCH:
      expand_case (stmt);
      break;
    case GIMPLE_ASM:
      expand_asm_stmt (stmt);
      break;
    case GIMPLE_CALL:
      expand_call_stmt (stmt);
      break;

    case GIMPLE_RETURN:
      op0 = gimple_return_retval (stmt);

      if (op0 && op0 != error_mark_node)
        {
          tree result = DECL_RESULT (current_function_decl);

          /* If we are not returning the current function's RESULT_DECL,
             build an assignment to it.  */
          if (op0 != result)
            {
              /* I believe that a function's RESULT_DECL is unique.  */
              gcc_assert (TREE_CODE (op0) != RESULT_DECL);

              /* ??? We'd like to use simply expand_assignment here,
                 but this fails if the value is of BLKmode but the return
                 decl is a register.  expand_return has special handling
                 for this combination, which eventually should move
                 to common code.  See comments there.  Until then, let's
                 build a modify expression :-/  */
              op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
                            result, op0);
            }
        }
      if (!op0)
        expand_null_return ();
      else
        expand_return (op0);
      break;

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);

        /* Tree expand used to fiddle with |= and &= of two bitfield
           COMPONENT_REFs here.  This can't happen with gimple, the LHS
           of binary assigns must be a gimple reg.  */

        if (TREE_CODE (lhs) != SSA_NAME
            || get_gimple_rhs_class (gimple_expr_code (stmt))
               == GIMPLE_SINGLE_RHS)
          {
            tree rhs = gimple_assign_rhs1 (stmt);
            gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
                        == GIMPLE_SINGLE_RHS);
            if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
              SET_EXPR_LOCATION (rhs, gimple_location (stmt));
            expand_assignment (lhs, rhs,
                               gimple_assign_nontemporal_move_p (stmt));
          }
        else
          {
            rtx target, temp;
            bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
            struct separate_ops ops;
            bool promoted = false;

            target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
            if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
              promoted = true;

            ops.code = gimple_assign_rhs_code (stmt);
            ops.type = TREE_TYPE (lhs);
            switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
              {
                case GIMPLE_BINARY_RHS:
                  ops.op1 = gimple_assign_rhs2 (stmt);
                  /* Fallthru */
                case GIMPLE_UNARY_RHS:
                  ops.op0 = gimple_assign_rhs1 (stmt);
                  break;
                default:
                  gcc_unreachable ();
              }
            ops.location = gimple_location (stmt);

            /* If we want to use a nontemporal store, force the value to
               a register first.  If we store into a promoted register,
               don't directly expand to target.  */
            temp = nontemporal || promoted ? NULL_RTX : target;
            temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
                                       EXPAND_NORMAL);

            if (temp == target)
              ;
            else if (promoted)
              {
                int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
                /* If TEMP is a VOIDmode constant, use convert_modes to make
                   sure that we properly convert it.  */
                if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
                  {
                    temp = convert_modes (GET_MODE (target),
                                          TYPE_MODE (ops.type),
                                          temp, unsignedp);
                    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                          GET_MODE (target), temp, unsignedp);
                  }

                convert_move (SUBREG_REG (target), temp, unsignedp);
              }
            else if (nontemporal && emit_storent_insn (target, temp))
              ;
            else
              {
                temp = force_operand (temp, target);
                if (temp != target)
                  emit_move_insn (target, temp);
              }
          }
      }
      break;

    default:
      gcc_unreachable ();
    }
}
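
/* Illustrative example (editorial annotation, not from the original
   sources): on a target that promotes SImode variables to full-width
   DImode registers, the LHS of "x_1 = y_2 + z_3" may expand to
   (subreg/s:SI (reg:DI 60) 0).  PROMOTED is then true, so the RHS is
   expanded into a scratch rather than directly into TARGET, and
   convert_move performs the widening store into the whole DImode
   register.  */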

/* Expand one gimple statement STMT and return the last RTL instruction
   before any of the newly generated ones.

   In addition to generating the necessary RTL instructions this also
   sets REG_EH_REGION notes if necessary and sets the current source
   location for diagnostics.  */

static rtx
expand_gimple_stmt (gimple stmt)
{
  int lp_nr = 0;
  rtx last = NULL;
  location_t saved_location = input_location;

  last = get_last_insn ();

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  gcc_assert (cfun);

  if (gimple_has_location (stmt))
    {
      input_location = gimple_location (stmt);
      set_curr_insn_source_location (input_location);

      /* Record where the insns produced belong.  */
      set_curr_insn_block (gimple_block (stmt));
    }

  expand_gimple_stmt_1 (stmt);
  /* Free any temporaries used to evaluate this statement.  */
  free_temp_slots ();

  input_location = saved_location;

  /* Mark all insns that may trap.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr)
    {
      rtx insn;
      for (insn = next_real_insn (last); insn;
           insn = next_real_insn (insn))
        {
          if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
              /* If we want exceptions for non-call insns, any
                 may_trap_p instruction may throw.  */
              && GET_CODE (PATTERN (insn)) != CLOBBER
              && GET_CODE (PATTERN (insn)) != USE
              && insn_could_throw_p (insn))
            make_reg_eh_region_note (insn, 0, lp_nr);
        }
    }

  return last;
}
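
/* Illustrative example (editorial annotation, not from the original
   sources): with -fnon-call-exceptions, a statement like
   "x_1 = y_2 / z_3" inside a try block expands to a division insn
   that insn_could_throw_p accepts, so the loop above attaches a
   REG_EH_REGION note carrying LP_NR, the landing pad of the
   enclosing EH region.  */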

/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
{
  rtx last2, last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;

  last2 = last = expand_gimple_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
          if (e->dest != EXIT_BLOCK_PTR)
            {
              e->dest->count -= e->count;
              e->dest->frequency -= EDGE_FREQUENCY (e);
              if (e->dest->count < 0)
                e->dest->count = 0;
              if (e->dest->frequency < 0)
                e->dest->frequency = 0;
            }
          count += e->count;
          probability += e->probability;
          remove_edge (e);
        }
      else
        ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance, the sqrt builtin expander expands an if-then-else
         with a sibcall in the then-arm and a label for the else-arm.  */
      if (LABEL_P (NEXT_INSN (last)))
        {
          *can_fallthru = true;
          break;
        }
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
        BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  return bb;
}
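
/* Illustrative sketch (editorial annotation, not from the original
   sources) of the insn stream the deletion loop above walks in the
   __builtin_sqrt case:

       (call_insn/j ... "sqrt")   ; sibcall taken on one arm
       (barrier)
       ...                        ; stray return insns, deleted
       (code_label ...)           ; start of the inline-sqrt arm

   Reaching the label stops the deletion and sets *CAN_FALLTHRU back
   to true, since the code after the label is still reachable.  */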

/* Return the difference between the floor and the truncated result of
   a signed division by OP1 with remainder MOD.  */
static rtx
floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_LT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      constm1_rtx, const0_rtx),
     const0_rtx);
}
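
/* Worked example (editorial annotation, not from the original
   sources): for -7 / 2, truncation gives -3 with MOD == -1.  Since
   MOD != 0 and op1 / mod == 2 / -1 == -2 < 0, the adjustment is -1,
   yielding the floor result -3 + -1 == -4.  For 7 / 2 the remainder
   has the same sign as OP1, op1 / mod > 0, and the adjustment is 0.  */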

/* Return the difference between the ceil and the truncated result of
   a signed division by OP1 with remainder MOD.  */
static rtx
ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      const1_rtx, const0_rtx),
     const0_rtx);
}
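
/* Worked example (editorial annotation, not from the original
   sources): for 7 / 2, truncation gives 3 with MOD == 1.  Since
   MOD != 0 and op1 / mod == 2 > 0, the adjustment is +1, yielding
   the ceiling result 3 + 1 == 4.  For -7 / 2, op1 / mod is negative,
   so the truncated result -3 is already the ceiling.  */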

/* Return the difference between the ceil and the truncated result of
   an unsigned division by OP1 with remainder MOD.  */
static rtx
ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
  /* (mod != 0 ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     const1_rtx, const0_rtx);
}
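
/* Worked example (editorial annotation, not from the original
   sources): for 7u / 2u, truncation gives 3 with MOD == 1; any
   nonzero remainder means the exact quotient was rounded down, so
   the adjustment is simply +1, yielding the ceiling result 4.  */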

/* Return the difference between the rounded and the truncated result
   of a signed division by OP1 with remainder MOD.  Halfway cases are
   rounded away from zero, rather than to the nearest even number.  */
static rtx
round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (abs (mod) >= abs (op1) - abs (mod)
      ? (op1 / mod > 0 ? 1 : -1)
      : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
                       gen_rtx_MINUS (mode,
                                      gen_rtx_ABS (mode, op1),
                                      gen_rtx_ABS (mode, mod))),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      const1_rtx, constm1_rtx),
     const0_rtx);
}
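
/* Worked example (editorial annotation, not from the original
   sources): for 7 / 2, truncation gives 3 with MOD == 1.  Here
   abs (mod) == 1 >= abs (op1) - abs (mod) == 1, and op1 / mod > 0,
   so the adjustment is +1 and 3.5 rounds away from zero to 4.
   For -7 / 2 (truncated -3, MOD == -1), op1 / mod < 0 gives -1,
   so -3.5 rounds away from zero to -4.  */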

/* Return the difference between the rounded and the truncated result
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */
static rtx
round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod >= op1 - mod ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, mod,
                       gen_rtx_MINUS (mode, op1, mod)),
     const1_rtx, const0_rtx);
}
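
/* Worked example (editorial annotation, not from the original
   sources): for 7u / 2u, truncation gives 3 with MOD == 1; since
   MOD >= op1 - mod (1 >= 1), the adjustment is +1 and 3.5 rounds
   up to 4.  For 9u / 4u (MOD == 1, op1 - mod == 3) the adjustment
   is 0 and 2.25 rounds down to 2.  */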

/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
   any rtl.  */

static rtx
convert_debug_memory_address (enum machine_mode mode, rtx x)
{
  enum machine_mode xmode = GET_MODE (x);

#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (mode == Pmode);
  gcc_assert (xmode == mode || xmode == VOIDmode);
#else
  gcc_assert (mode == Pmode || mode == ptr_mode);

  if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
    return x;

  if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
    x = simplify_gen_subreg (mode, x, xmode,
                             subreg_lowpart_offset
                             (mode, xmode));
  else if (POINTERS_EXTEND_UNSIGNED > 0)
    x = gen_rtx_ZERO_EXTEND (mode, x);
  else if (!POINTERS_EXTEND_UNSIGNED)
    x = gen_rtx_SIGN_EXTEND (mode, x);
  else
    gcc_unreachable ();
#endif /* POINTERS_EXTEND_UNSIGNED */

  return x;
}
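
/* Illustrative example (editorial annotation, not from the original
   sources): on a hypothetical target where Pmode is DImode, ptr_mode
   is SImode and POINTERS_EXTEND_UNSIGNED is 1, widening an SImode
   address X to Pmode yields (zero_extend:DI X), while narrowing a
   DImode address to ptr_mode takes the SImode lowpart subreg.  No
   insns are emitted; only the rtx expression is built.  */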

/* Return an RTX equivalent to the value of the tree expression
   EXP.  */

static rtx
expand_debug_expr (tree exp)
{
  rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
  addr_space_t as;
  enum machine_mode address_mode;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_expression:
      switch (TREE_CODE (exp))
        {
        case COND_EXPR:
        case DOT_PROD_EXPR:
          goto ternary;

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          goto binary;

        case TRUTH_NOT_EXPR:
          goto unary;

        default:
          break;
        }
      break;

    ternary:
      op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
      if (!op2)
        return NULL_RTX;
      /* Fall through.  */

    binary:
    case tcc_binary:
    case tcc_comparison:
      op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
      if (!op1)
        return NULL_RTX;
      /* Fall through.  */

    unary:
    case tcc_unary:
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
        return NULL_RTX;
      break;

    case tcc_type:
    case tcc_statement:
      gcc_unreachable ();

    case tcc_constant:
    case tcc_exceptional:
    case tcc_declaration:
    case tcc_reference:
    case tcc_vl_exp:
      break;
    }

  switch (TREE_CODE (exp))
    {
    case STRING_CST:
      if (!lookup_constant_def (exp))
        {
          if (strlen (TREE_STRING_POINTER (exp)) + 1
              != (size_t) TREE_STRING_LENGTH (exp))
            return NULL_RTX;
          op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
          op0 = gen_rtx_MEM (BLKmode, op0);
          set_mem_attributes (op0, exp, 0);
          return op0;
        }
      /* Fall through...  */

    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
      op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
      return op0;

    case COMPLEX_CST:
      gcc_assert (COMPLEX_MODE_P (mode));
      op0 = expand_debug_expr (TREE_REALPART (exp));
      op1 = expand_debug_expr (TREE_IMAGPART (exp));
      return gen_rtx_CONCAT (mode, op0, op1);

    case DEBUG_EXPR_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      if (op0)
        return op0;

      op0 = gen_rtx_DEBUG_EXPR (mode);
      DEBUG_EXPR_TREE_DECL (op0) = exp;
      SET_DECL_RTL (exp, op0);

      return op0;

    case VAR_DECL:
    case PARM_DECL:
    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case RESULT_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      /* This decl was probably optimized away.  */
      if (!op0)
        {
          if (TREE_CODE (exp) != VAR_DECL
              || DECL_EXTERNAL (exp)
              || !TREE_STATIC (exp)
              || !DECL_NAME (exp)
              || DECL_HARD_REGISTER (exp)
              || mode == VOIDmode)
            return NULL;

          op0 = make_decl_rtl_for_debug (exp);
          if (!MEM_P (op0)
              || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
              || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
            return NULL;
        }
      else
        op0 = copy_rtx (op0);

      if (GET_MODE (op0) == BLKmode
          /* If op0 is not BLKmode but MODE is, adjust_mode
             below would ICE.  While it is likely a FE bug,
             try to be robust here.  See PR43166.  */
          || mode == BLKmode
          || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
        {
          gcc_assert (MEM_P (op0));
          op0 = adjust_address_nv (op0, mode, 0);
          return op0;
        }

      /* Fall through.  */

    adjust_mode:
    case PAREN_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
      {
        enum machine_mode inner_mode = GET_MODE (op0);

        if (mode == inner_mode)
          return op0;

        if (inner_mode == VOIDmode)
          {
            if (TREE_CODE (exp) == SSA_NAME)
              inner_mode = TYPE_MODE (TREE_TYPE (exp));
            else
              inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
            if (mode == inner_mode)
              return op0;
          }

        if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
          {
            if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
              op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
            else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
              op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
            else
              op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
          }
        else if (FLOAT_MODE_P (mode))
          {
            gcc_assert (TREE_CODE (exp) != SSA_NAME);
            if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
              op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
            else
              op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
          }
        else if (FLOAT_MODE_P (inner_mode))
          {
            if (unsignedp)
              op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
            else
              op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
          }
        else if (CONSTANT_P (op0)
                 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
          op0 = simplify_gen_subreg (mode, op0, inner_mode,
                                     subreg_lowpart_offset (mode,
                                                            inner_mode));
        else if (unsignedp)
          op0 = gen_rtx_ZERO_EXTEND (mode, op0);
        else
          op0 = gen_rtx_SIGN_EXTEND (mode, op0);

        return op0;
      }

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
        return NULL;

      if (POINTER_TYPE_P (TREE_TYPE (exp)))
        {
          as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
          address_mode = targetm.addr_space.address_mode (as);
        }
      else
        {
          as = ADDR_SPACE_GENERIC;
          address_mode = Pmode;
        }

      if (TREE_CODE (exp) == ALIGN_INDIRECT_REF)
        {
          int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp));
          op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align));
        }

      op0 = gen_rtx_MEM (mode, op0);

      set_mem_attributes (op0, exp, 0);
      set_mem_addr_space (op0, as);

      return op0;

    case TARGET_MEM_REF:
      if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp)))
        return NULL;

      op0 = expand_debug_expr
            (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
      if (!op0)
        return NULL;

      as = TYPE_ADDR_SPACE (TREE_TYPE (exp));

      op0 = gen_rtx_MEM (mode, op0);

      set_mem_attributes (op0, exp, 0);
      set_mem_addr_space (op0, as);

      return op0;
 | 
      
         | 2476 |  |  |  
 | 
      
         | 2477 |  |  |     case ARRAY_REF:
 | 
      
         | 2478 |  |  |     case ARRAY_RANGE_REF:
 | 
      
         | 2479 |  |  |     case COMPONENT_REF:
 | 
      
         | 2480 |  |  |     case BIT_FIELD_REF:
 | 
      
         | 2481 |  |  |     case REALPART_EXPR:
 | 
      
         | 2482 |  |  |     case IMAGPART_EXPR:
 | 
      
         | 2483 |  |  |     case VIEW_CONVERT_EXPR:
 | 
      
         | 2484 |  |  |       {
 | 
      
         | 2485 |  |  |         enum machine_mode mode1;
 | 
      
         | 2486 |  |  |         HOST_WIDE_INT bitsize, bitpos;
 | 
      
         | 2487 |  |  |         tree offset;
 | 
      
         | 2488 |  |  |         int volatilep = 0;
 | 
      
         | 2489 |  |  |         tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
 | 
      
         | 2490 |  |  |                                         &mode1, &unsignedp, &volatilep, false);
 | 
      
         | 2491 |  |  |         rtx orig_op0;
 | 
      
         | 2492 |  |  |  
 | 
      
         | 2493 |  |  |         if (bitsize == 0)
 | 
      
         | 2494 |  |  |           return NULL;
 | 
      
         | 2495 |  |  |  
 | 
      
         | 2496 |  |  |         orig_op0 = op0 = expand_debug_expr (tem);
 | 
      
         | 2497 |  |  |  
 | 
      
         | 2498 |  |  |         if (!op0)
 | 
      
         | 2499 |  |  |           return NULL;
 | 
      
         | 2500 |  |  |  
 | 
      
         | 2501 |  |  |         if (offset)
 | 
      
         | 2502 |  |  |           {
 | 
      
         | 2503 |  |  |             enum machine_mode addrmode, offmode;
 | 
      
         | 2504 |  |  |  
 | 
      
         | 2505 |  |  |             if (!MEM_P (op0))
 | 
      
         | 2506 |  |  |               return NULL;
 | 
      
         | 2507 |  |  |  
 | 
      
         | 2508 |  |  |             op0 = XEXP (op0, 0);
 | 
      
         | 2509 |  |  |             addrmode = GET_MODE (op0);
 | 
      
         | 2510 |  |  |             if (addrmode == VOIDmode)
 | 
      
         | 2511 |  |  |               addrmode = Pmode;
 | 
      
         | 2512 |  |  |  
 | 
      
         | 2513 |  |  |             op1 = expand_debug_expr (offset);
 | 
      
         | 2514 |  |  |             if (!op1)
 | 
      
         | 2515 |  |  |               return NULL;
 | 
      
         | 2516 |  |  |  
 | 
      
         | 2517 |  |  |             offmode = GET_MODE (op1);
 | 
      
         | 2518 |  |  |             if (offmode == VOIDmode)
 | 
      
         | 2519 |  |  |               offmode = TYPE_MODE (TREE_TYPE (offset));
 | 
      
         | 2520 |  |  |  
 | 
      
         | 2521 |  |  |             if (addrmode != offmode)
 | 
      
         | 2522 |  |  |               op1 = simplify_gen_subreg (addrmode, op1, offmode,
 | 
      
         | 2523 |  |  |                                          subreg_lowpart_offset (addrmode,
 | 
      
         | 2524 |  |  |                                                                 offmode));
 | 
      
         | 2525 |  |  |  
 | 
      
         | 2526 |  |  |             /* Don't use offset_address here, we don't need a
 | 
      
         | 2527 |  |  |                recognizable address, and we don't want to generate
 | 
      
         | 2528 |  |  |                code.  */
 | 
      
         | 2529 |  |  |             op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
 | 
      
         | 2530 |  |  |           }
 | 
      
         | 2531 |  |  |  
 | 
      
	if (MEM_P (op0))
	  {
	    if (mode1 == VOIDmode)
	      /* Bitfield.  */
	      mode1 = smallest_mode_for_size (bitsize, MODE_INT);
	    if (bitpos >= BITS_PER_UNIT)
	      {
		op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos %= BITS_PER_UNIT;
	      }
	    else if (bitpos < 0)
	      {
		HOST_WIDE_INT units
		  = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
		op0 = adjust_address_nv (op0, mode1, units);
		bitpos += units * BITS_PER_UNIT;
	      }
	    else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
	      op0 = adjust_address_nv (op0, mode, 0);
	    else if (GET_MODE (op0) != mode1)
	      op0 = adjust_address_nv (op0, mode1, 0);
	    else
	      op0 = copy_rtx (op0);
	    if (op0 == orig_op0)
	      op0 = shallow_copy_rtx (op0);
	    set_mem_attributes (op0, exp, 0);
	  }

	if (bitpos == 0 && mode == GET_MODE (op0))
	  return op0;

	if (bitpos < 0)
	  return NULL;

	if (GET_MODE (op0) == BLKmode)
	  return NULL;

	if ((bitpos % BITS_PER_UNIT) == 0
	    && bitsize == GET_MODE_BITSIZE (mode1))
	  {
	    enum machine_mode opmode = GET_MODE (op0);

	    if (opmode == VOIDmode)
	      opmode = mode1;

	    /* This condition may hold if we're expanding the address
	       right past the end of an array that turned out not to
	       be addressable (i.e., the address was only computed in
	       debug stmts).  The gen_subreg below would rightfully
	       crash, and the address doesn't really exist, so just
	       drop it.  */
	    if (bitpos >= GET_MODE_BITSIZE (opmode))
	      return NULL;

	    if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
	      return simplify_gen_subreg (mode, op0, opmode,
					  bitpos / BITS_PER_UNIT);
	  }

	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
				     && TYPE_UNSIGNED (TREE_TYPE (exp))
				     ? SIGN_EXTRACT
				     : ZERO_EXTRACT, mode,
				     GET_MODE (op0) != VOIDmode
				     ? GET_MODE (op0) : mode1,
				     op0, GEN_INT (bitsize), GEN_INT (bitpos));
      }
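      /* Illustrative note (assuming BITS_PER_UNIT == 8): extracting an
	 8-bit field at bit offset 19 of a MEM above first advances the
	 address by 19 / 8 == 2 bytes and reduces bitpos to 3; if no
	 aligned subreg applies, the residue is emitted as a
	 SIGN_EXTRACT/ZERO_EXTRACT of width 8 at bit position 3.  */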

    case ABS_EXPR:
      return gen_rtx_ABS (mode, op0);

    case NEGATE_EXPR:
      return gen_rtx_NEG (mode, op0);

    case BIT_NOT_EXPR:
      return gen_rtx_NOT (mode, op0);

    case FLOAT_EXPR:
      if (unsignedp)
	return gen_rtx_UNSIGNED_FLOAT (mode, op0);
      else
	return gen_rtx_FLOAT (mode, op0);

    case FIX_TRUNC_EXPR:
      if (unsignedp)
	return gen_rtx_UNSIGNED_FIX (mode, op0);
      else
	return gen_rtx_FIX (mode, op0);

    case POINTER_PLUS_EXPR:
      /* For the rare target where pointers are not the same size as
	 size_t, we need to check for mis-matched modes and correct
	 the addend.  */
      if (op0 && op1
	  && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
	  && GET_MODE (op0) != GET_MODE (op1))
	{
	  if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
	    op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
	  else
	    /* We always sign-extend, regardless of the signedness of
	       the operand, because the operand is always unsigned
	       here even if the original C expression is signed.  */
	    op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
	}
      /* Fall through.  */
    case PLUS_EXPR:
      return gen_rtx_PLUS (mode, op0, op1);
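      /* E.g., on a hypothetical target with 64-bit pointers but a
	 32-bit offset operand, the addend is sign-extended (or, in the
	 opposite case, truncated) to the pointer's mode above before
	 the PLUS is formed.  */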

    case MINUS_EXPR:
      return gen_rtx_MINUS (mode, op0, op1);

    case MULT_EXPR:
      return gen_rtx_MULT (mode, op0, op1);

    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (unsignedp)
	return gen_rtx_UDIV (mode, op0, op1);
      else
	return gen_rtx_DIV (mode, op0, op1);

    case TRUNC_MOD_EXPR:
      if (unsignedp)
	return gen_rtx_UMOD (mode, op0, op1);
      else
	return gen_rtx_MOD (mode, op0, op1);

    case FLOOR_DIV_EXPR:
      if (unsignedp)
	return gen_rtx_UDIV (mode, op0, op1);
      else
	{
	  rtx div = gen_rtx_DIV (mode, op0, op1);
	  rtx mod = gen_rtx_MOD (mode, op0, op1);
	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
	  return gen_rtx_PLUS (mode, div, adj);
	}

    case FLOOR_MOD_EXPR:
      if (unsignedp)
	return gen_rtx_UMOD (mode, op0, op1);
      else
	{
	  rtx mod = gen_rtx_MOD (mode, op0, op1);
	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
	  adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
	  return gen_rtx_PLUS (mode, mod, adj);
	}
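      /* Worked example (signed, assuming floor_sdiv_adjust yields -1
	 exactly when the remainder is nonzero and its sign differs from
	 the divisor's): -7 / 2 truncates to -3 with remainder -1, so
	 FLOOR_DIV gives -3 + -1 == -4 and FLOOR_MOD gives
	 -1 + -(-1 * 2) == 1, matching floor semantics.  */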

    case CEIL_DIV_EXPR:
      if (unsignedp)
	{
	  rtx div = gen_rtx_UDIV (mode, op0, op1);
	  rtx mod = gen_rtx_UMOD (mode, op0, op1);
	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
	  return gen_rtx_PLUS (mode, div, adj);
	}
      else
	{
	  rtx div = gen_rtx_DIV (mode, op0, op1);
	  rtx mod = gen_rtx_MOD (mode, op0, op1);
	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
	  return gen_rtx_PLUS (mode, div, adj);
	}

    case CEIL_MOD_EXPR:
      if (unsignedp)
	{
	  rtx mod = gen_rtx_UMOD (mode, op0, op1);
	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
	  adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
	  return gen_rtx_PLUS (mode, mod, adj);
	}
      else
	{
	  rtx mod = gen_rtx_MOD (mode, op0, op1);
	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
	  adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
	  return gen_rtx_PLUS (mode, mod, adj);
	}
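      /* Worked example (signed, assuming ceil_sdiv_adjust yields 1
	 exactly when the remainder is nonzero and agrees in sign with
	 the divisor): 7 / 2 truncates to 3 with remainder 1, so
	 CEIL_DIV gives 3 + 1 == 4 and CEIL_MOD gives
	 1 + -(1 * 2) == -1, matching ceiling semantics.  */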

    case ROUND_DIV_EXPR:
      if (unsignedp)
	{
	  rtx div = gen_rtx_UDIV (mode, op0, op1);
	  rtx mod = gen_rtx_UMOD (mode, op0, op1);
	  rtx adj = round_udiv_adjust (mode, mod, op1);
	  return gen_rtx_PLUS (mode, div, adj);
	}
      else
	{
	  rtx div = gen_rtx_DIV (mode, op0, op1);
	  rtx mod = gen_rtx_MOD (mode, op0, op1);
	  rtx adj = round_sdiv_adjust (mode, mod, op1);
	  return gen_rtx_PLUS (mode, div, adj);
	}

    case ROUND_MOD_EXPR:
      if (unsignedp)
	{
	  rtx mod = gen_rtx_UMOD (mode, op0, op1);
	  rtx adj = round_udiv_adjust (mode, mod, op1);
	  adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
	  return gen_rtx_PLUS (mode, mod, adj);
	}
      else
	{
	  rtx mod = gen_rtx_MOD (mode, op0, op1);
	  rtx adj = round_sdiv_adjust (mode, mod, op1);
	  adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
	  return gen_rtx_PLUS (mode, mod, adj);
	}
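      /* Worked example (signed, assuming round_sdiv_adjust rounds to
	 nearest with ties away from zero): 7 / 2 truncates to 3 with
	 remainder 1; since 2 * 1 reaches the divisor, the adjustment is
	 1, so ROUND_DIV gives 4 and ROUND_MOD gives 1 + -(1 * 2) == -1.  */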

    case LSHIFT_EXPR:
      return gen_rtx_ASHIFT (mode, op0, op1);

    case RSHIFT_EXPR:
      if (unsignedp)
	return gen_rtx_LSHIFTRT (mode, op0, op1);
      else
	return gen_rtx_ASHIFTRT (mode, op0, op1);

    case LROTATE_EXPR:
      return gen_rtx_ROTATE (mode, op0, op1);

    case RROTATE_EXPR:
      return gen_rtx_ROTATERT (mode, op0, op1);
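      /* Note the signedness split for RSHIFT: as ASHIFTRT, -8 >> 1
	 keeps the sign bit and gives -4, whereas LSHIFTRT shifts in a
	 zero and yields a large positive value for the same bits.  */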

    case MIN_EXPR:
      if (unsignedp)
	return gen_rtx_UMIN (mode, op0, op1);
      else
	return gen_rtx_SMIN (mode, op0, op1);

    case MAX_EXPR:
      if (unsignedp)
	return gen_rtx_UMAX (mode, op0, op1);
      else
	return gen_rtx_SMAX (mode, op0, op1);

    case BIT_AND_EXPR:
    case TRUTH_AND_EXPR:
      return gen_rtx_AND (mode, op0, op1);

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      return gen_rtx_IOR (mode, op0, op1);

    case BIT_XOR_EXPR:
    case TRUTH_XOR_EXPR:
      return gen_rtx_XOR (mode, op0, op1);

    case TRUTH_ANDIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);

    case TRUTH_ORIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);

    case TRUTH_NOT_EXPR:
      return gen_rtx_EQ (mode, op0, const0_rtx);
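      /* The short-circuit forms are modeled as selections here:
	 ANDIF as "op0 ? op1 : 0", ORIF as "op0 ? 1 : op1", and
	 TRUTH_NOT as a comparison of op0 against zero.  */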

    case LT_EXPR:
      if (unsignedp)
	return gen_rtx_LTU (mode, op0, op1);
      else
	return gen_rtx_LT (mode, op0, op1);

    case LE_EXPR:
      if (unsignedp)
	return gen_rtx_LEU (mode, op0, op1);
      else
	return gen_rtx_LE (mode, op0, op1);

    case GT_EXPR:
      if (unsignedp)
	return gen_rtx_GTU (mode, op0, op1);
      else
	return gen_rtx_GT (mode, op0, op1);

    case GE_EXPR:
      if (unsignedp)
	return gen_rtx_GEU (mode, op0, op1);
      else
	return gen_rtx_GE (mode, op0, op1);

    case EQ_EXPR:
      return gen_rtx_EQ (mode, op0, op1);

    case NE_EXPR:
      return gen_rtx_NE (mode, op0, op1);

    case UNORDERED_EXPR:
      return gen_rtx_UNORDERED (mode, op0, op1);

    case ORDERED_EXPR:
      return gen_rtx_ORDERED (mode, op0, op1);

    case UNLT_EXPR:
      return gen_rtx_UNLT (mode, op0, op1);

    case UNLE_EXPR:
      return gen_rtx_UNLE (mode, op0, op1);

    case UNGT_EXPR:
      return gen_rtx_UNGT (mode, op0, op1);

    case UNGE_EXPR:
      return gen_rtx_UNGE (mode, op0, op1);

    case UNEQ_EXPR:
      return gen_rtx_UNEQ (mode, op0, op1);

    case LTGT_EXPR:
      return gen_rtx_LTGT (mode, op0, op1);
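      /* Signedness only matters for the ordering comparisons: with
	 unsignedp, LT_EXPR becomes LTU, so e.g. an all-ones value
	 compares above 1 instead of below it.  */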

    case COND_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);

    case COMPLEX_EXPR:
      gcc_assert (COMPLEX_MODE_P (mode));
      if (GET_MODE (op0) == VOIDmode)
	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
      if (GET_MODE (op1) == VOIDmode)
	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
      return gen_rtx_CONCAT (mode, op0, op1);
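      /* Complex values are represented as CONCAT (real, imag) pairs;
	 VOIDmode constant operands are first wrapped in a CONST of the
	 component mode so each half carries a mode of its own.  */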

    case CONJ_EXPR:
      if (GET_CODE (op0) == CONCAT)
	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
			       gen_rtx_NEG (GET_MODE_INNER (mode),
					    XEXP (op0, 1)));
      else
	{
	  enum machine_mode imode = GET_MODE_INNER (mode);
	  rtx re, im;

	  if (MEM_P (op0))
	    {
	      re = adjust_address_nv (op0, imode, 0);
	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
	    }
	  else
	    {
	      enum machine_mode ifmode = int_mode_for_mode (mode);
	      enum machine_mode ihmode = int_mode_for_mode (imode);
	      rtx halfsize;
	      if (ifmode == BLKmode || ihmode == BLKmode)
		return NULL;
	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
	      re = op0;
	      if (mode != ifmode)
		re = gen_rtx_SUBREG (ifmode, re, 0);
	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
	      if (imode != ihmode)
		re = gen_rtx_SUBREG (imode, re, 0);
	      im = copy_rtx (op0);
	      if (mode != ifmode)
		im = gen_rtx_SUBREG (ifmode, im, 0);
	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
	      if (imode != ihmode)
		im = gen_rtx_SUBREG (imode, im, 0);
	    }
	  im = gen_rtx_NEG (imode, im);
	  return gen_rtx_CONCAT (mode, re, im);
	}
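      /* Illustrative example for the non-MEM, non-CONCAT path above
	 (assuming 32-bit components in a 64-bit scalar, bit numbering
	 aside): the real part is a ZERO_EXTRACT of one 32-bit half, the
	 imaginary part one of the other half, and the latter is negated
	 to form the conjugate.  */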

    case ADDR_EXPR:
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0 || !MEM_P (op0))
	return NULL;

      op0 = convert_debug_memory_address (mode, XEXP (op0, 0));

      return op0;
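      /* Only operands that were actually given a MEM have a
	 recoverable address; anything else, e.g. a value that lives in
	 a register, has no address, hence the NULL above.  */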

    case VECTOR_CST:
      exp = build_constructor_from_list (TREE_TYPE (exp),
					 TREE_VECTOR_CST_ELTS (exp));
      /* Fall through.  */

    case CONSTRUCTOR:
      if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
	{
	  unsigned i;
	  tree val;

	  op0 = gen_rtx_CONCATN
	    (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
	    {
	      op1 = expand_debug_expr (val);
	      if (!op1)
		return NULL;
	      XVECEXP (op0, 0, i) = op1;
	    }

	  if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
	    {
	      op1 = expand_debug_expr
		(fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));

	      if (!op1)
		return NULL;

	      for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
		XVECEXP (op0, 0, i) = op1;
	    }

	  return op0;
	}
      else
	goto flag_unsupported;
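      /* A constructor that supplies fewer elements than the vector has
	 subparts gets its remaining lanes filled with zero of the
	 element type above, e.g. two trailing zero lanes for a 4-lane
	 vector built from two values.  */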

    case CALL_EXPR:
      /* ??? Maybe handle some builtins?  */
      return NULL;

    case SSA_NAME:
      {
	gimple g = get_gimple_for_ssa_name (exp);
	if (g)
	  {
	    op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
	    if (!op0)
	      return NULL;
	  }
	else
	  {
	    int part = var_to_partition (SA.map, exp);

	    if (part == NO_PARTITION)
	      return NULL;

	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);

	    op0 = SA.partition_to_pseudo[part];
	  }
	goto adjust_mode;
      }
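      /* That is: a TERed SSA name is re-expanded from its defining
	 statement's RHS, while any other name must belong to a
	 partition, whose pseudo then stands for its location.  */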

    case ERROR_MARK:
      return NULL;

    /* Vector stuff.  For most of these codes there is no corresponding
       RTL code.  */
    case REALIGN_LOAD_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case VEC_COND_EXPR:
    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:
    case VEC_LSHIFT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_RSHIFT_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
      return NULL;

    /* Misc codes.  */
    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case OBJ_TYPE_REF:
    case WITH_SIZE_EXPR:
      return NULL;

    case DOT_PROD_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
	    op0 = gen_rtx_ZERO_EXTEND (mode, op0);
	  else
	    op0 = gen_rtx_SIGN_EXTEND (mode, op0);
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
	    op1 = gen_rtx_ZERO_EXTEND (mode, op1);
	  else
	    op1 = gen_rtx_SIGN_EXTEND (mode, op1);
	  op0 = gen_rtx_MULT (mode, op0, op1);
	  return gen_rtx_PLUS (mode, op0, op2);
	}
      return NULL;
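      /* I.e. the dot product is approximated as
	 widen (op0) * widen (op1) + op2, each operand zero- or
	 sign-extended according to its type; vector modes are given up
	 on (the NULL above).  */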

    case WIDEN_MULT_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
	    op0 = gen_rtx_ZERO_EXTEND (mode, op0);
	  else
	    op0 = gen_rtx_SIGN_EXTEND (mode, op0);
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
	    op1 = gen_rtx_ZERO_EXTEND (mode, op1);
	  else
	    op1 = gen_rtx_SIGN_EXTEND (mode, op1);
	  return gen_rtx_MULT (mode, op0, op1);
	}
      return NULL;

    case WIDEN_SUM_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
	    op0 = gen_rtx_ZERO_EXTEND (mode, op0);
	  else
	    op0 = gen_rtx_SIGN_EXTEND (mode, op0);
	  return gen_rtx_PLUS (mode, op0, op1);
	}
      return NULL;

    default:
    flag_unsupported:
#ifdef ENABLE_CHECKING
      debug_tree (exp);
      gcc_unreachable ();
#else
      return NULL;
#endif
    }
}

/* Expand the _LOCs in debug insns.  We run this after expanding all
   regular insns, so that any variables referenced in the function
   will have their DECL_RTLs set.  */

static void
expand_debug_locations (void)
{
  rtx insn;
  rtx last = get_last_insn ();
  int save_strict_alias = flag_strict_aliasing;

  /* New alias sets while setting up memory attributes cause
     -fcompare-debug failures, even though they don't bring about any
     codegen changes.  */
  flag_strict_aliasing = 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (DEBUG_INSN_P (insn))
      {
	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
	rtx val;
	enum machine_mode mode;

	if (value == NULL_TREE)
	  val = NULL_RTX;
	else
	  {
	    val = expand_debug_expr (value);
	    gcc_assert (last == get_last_insn ());
	  }

	if (!val)
	  val = gen_rtx_UNKNOWN_VAR_LOC ();
	else
	  {
	    mode = GET_MODE (INSN_VAR_LOCATION (insn));

	    gcc_assert (mode == GET_MODE (val)
			|| (GET_MODE (val) == VOIDmode
			    && (CONST_INT_P (val)
				|| GET_CODE (val) == CONST_FIXED
				|| GET_CODE (val) == CONST_DOUBLE
				|| GET_CODE (val) == LABEL_REF)));
	  }

	INSN_VAR_LOCATION_LOC (insn) = val;
      }

  flag_strict_aliasing = save_strict_alias;
}

/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb)
{
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple stmt = NULL;
  rtx note, last;
  edge e;
  edge_iterator ei;
  void **elt;

  if (dump_file)
    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
	     bb->index);

  /* Note that since we are now transitioning from GIMPLE to RTL, we
     cannot use the gsi_*_bb() routines because they expect the basic
     block to be in GIMPLE, instead of RTL.  Therefore, we need to
     access the BB sequence directly.  */
  stmts = bb_seq (bb);
  bb->il.gimple = NULL;
  rtl_profile_for_bb (bb);
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
  gsi = gsi_last (stmts);
  if (!gsi_end_p (gsi)
      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    {
      gimple ret_stmt = gsi_stmt (gsi);

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);

      if (bb->next_bb == EXIT_BLOCK_PTR
	  && !gimple_return_retval (ret_stmt))
	{
	  gsi_remove (&gsi, false);
	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
	}
    }

  gsi = gsi_start (stmts);
  if (!gsi_end_p (gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	stmt = NULL;
    }

  elt = pointer_map_contains (lab_rtx_for_bb, bb);

  if (stmt || elt)
    {
      last = get_last_insn ();

      if (stmt)
	{
	  expand_gimple_stmt (stmt);
	  gsi_next (&gsi);
	}

      if (elt)
	emit_label ((rtx) *elt);

      /* Java emits line number notes at the top of labels.
	 ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_gimple_stmt (stmt, last);
    }
  else
    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    {
      basic_block new_bb;

      stmt = gsi_stmt (gsi);

      /* If this statement is a non-debug one, and we generate debug
	 insns, then this one might be the last real use of a TERed
	 SSA_NAME, but where there are still some debug uses further
	 down.  Expanding the current SSA name in such further debug
	 uses by their RHS might lead to wrong debug info, as coalescing
	 might make the operands of such an RHS be placed into the same
	 pseudo as something else.  Like so:
	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
	   use(a_1);
	   a_2 = ...
	   #DEBUG ... => a_1
	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
	 If we now were to expand a_1 by its RHS (a_0 + 1) in the debug use,
	 the write to a_2 would actually have clobbered the place which
	 formerly held a_0.

	 So, instead of that, we recognize the situation, and generate
	 debug temporaries at the last real use of TERed SSA names:
	   a_1 = a_0 + 1;
	   #DEBUG #D1 => a_1
	   use(a_1);
	   a_2 = ...
	   #DEBUG ... => #D1
	 */
      if (MAY_HAVE_DEBUG_INSNS
	  && SA.values
	  && !is_gimple_debug (stmt))
	{
	  ssa_op_iter iter;
	  tree op;
	  gimple def;

	  location_t sloc = get_curr_insn_source_location ();
	  tree sblock = get_curr_insn_block ();

	  /* Look for SSA names that have their last use here (TERed
	     names always have only one real use).  */
	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	    if ((def = get_gimple_for_ssa_name (op)))
	      {
		imm_use_iterator imm_iter;
		use_operand_p use_p;
		bool have_debug_uses = false;

		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
		  {
		    if (gimple_debug_bind_p (USE_STMT (use_p)))
		      {
			have_debug_uses = true;
			break;
		      }
		  }

		if (have_debug_uses)
		  {
		    /* OP is a TERed SSA name, with DEF its defining
		       statement, and where OP is used in further debug
		       instructions.  Generate a debug temporary, and
		       replace all uses of OP in debug insns with that
		       temporary.  */
		    gimple debugstmt;
		    tree value = gimple_assign_rhs_to_tree (def);
		    tree vexpr = make_node (DEBUG_EXPR_DECL);
		    rtx val;
		    enum machine_mode mode;

		    set_curr_insn_source_location (gimple_location (def));
		    set_curr_insn_block (gimple_block (def));

		    DECL_ARTIFICIAL (vexpr) = 1;
		    TREE_TYPE (vexpr) = TREE_TYPE (value);
		    if (DECL_P (value))
		      mode = DECL_MODE (value);
		    else
		      mode = TYPE_MODE (TREE_TYPE (value));
		    DECL_MODE (vexpr) = mode;

		    val = gen_rtx_VAR_LOCATION
			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);

		    val = emit_debug_insn (val);

		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
		      {
			if (!gimple_debug_bind_p (debugstmt))
			  continue;

			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
			  SET_USE (use_p, vexpr);

			update_stmt (debugstmt);
		      }
		  }
	      }
	  set_curr_insn_source_location (sloc);
	  set_curr_insn_block (sblock);
	}

      currently_expanding_gimple_stmt = stmt;

      /* Expand this statement, then evaluate the resulting RTL and
	 fix up the CFG accordingly.  */
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  new_bb = expand_gimple_cond (bb, stmt);
	  if (new_bb)
	    return new_bb;
	}
      else if (gimple_debug_bind_p (stmt))
	{
	  location_t sloc = get_curr_insn_source_location ();
	  tree sblock = get_curr_insn_block ();
	  gimple_stmt_iterator nsi = gsi;

	  for (;;)
	    {
	      tree var = gimple_debug_bind_get_var (stmt);
	      tree value;
	      rtx val;
	      enum machine_mode mode;

	      if (gimple_debug_bind_has_value_p (stmt))
		value = gimple_debug_bind_get_value (stmt);
	      else
		value = NULL_TREE;

	      last = get_last_insn ();

	      set_curr_insn_source_location (gimple_location (stmt));
	      set_curr_insn_block (gimple_block (stmt));

	      if (DECL_P (var))
		mode = DECL_MODE (var);
	      else
		mode = TYPE_MODE (TREE_TYPE (var));

	      val = gen_rtx_VAR_LOCATION
		(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);

	      val = emit_debug_insn (val);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  /* We can't dump the insn with a TREE where an RTX
		     is expected.  */
		  INSN_VAR_LOCATION_LOC (val) = const0_rtx;
		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
		  INSN_VAR_LOCATION_LOC (val) = (rtx)value;
		}

	      /* In order not to generate too many debug temporaries,
		 we delink all uses of debug statements we already expanded.
		 Therefore debug statements between definition and real
		 use of TERed SSA names will continue to use the SSA name,
		 and not be replaced with debug temps.  */
	      delink_stmt_imm_use (stmt);

	      gsi = nsi;
	      gsi_next (&nsi);
	      if (gsi_end_p (nsi))
		break;
	      stmt = gsi_stmt (nsi);
	      if (!gimple_debug_bind_p (stmt))
		break;
	    }

	  set_curr_insn_source_location (sloc);
	  set_curr_insn_block (sblock);
	}
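      /* Runs of consecutive debug binds are consumed by the inner loop
	 above, so the saved source location and block are restored just
	 once per run.  */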
      else
	{
	  if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
	    {
	      bool can_fallthru;
	      new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
	      if (new_bb)
		{
		  if (can_fallthru)
		    bb = new_bb;
		  else
		    return new_bb;
		}
	    }
	  else
	    {
	      def_operand_p def_p;
	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);

	      if (def_p != NULL)
		{
		  /* Ignore this stmt if it is in the list of
		     replaceable expressions.  */
		  if (SA.values
		      && bitmap_bit_p (SA.values,
				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
		    continue;
		}
	      last = expand_gimple_stmt (stmt);
	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
	    }
	}
    }

  currently_expanding_gimple_stmt = NULL;

  /* Expand implicit goto and convert goto_locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->goto_locus && e->goto_block)
	{
	  set_curr_insn_source_location (e->goto_locus);
	  set_curr_insn_block (e->goto_block);
	  e->goto_locus = curr_insn_locator ();
	}
      e->goto_block = NULL;
      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
	{
	  emit_jump (label_rtx_for_bb (e->dest));
	  e->flags &= ~EDGE_FALLTHRU;
	}
    }

  /* Expanded RTL can create a jump in the last instruction of the
     block.  This might later be assumed to be a jump to the successor
     and break edge insertion.  We need to insert a dummy move to
     prevent this.  See PR41440.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
      && (last = get_last_insn ())
      && JUMP_P (last))
    {
      rtx dummy = gen_reg_rtx (SImode);
      emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
    }
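  /* The (set (reg) (reg)) self-move emitted above is a no-op; its only
     purpose is to keep the jump from being the block's last insn.  */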
 | 
      
         | 3437 |  |  |  
 | 
      
         | 3438 |  |  |   do_pending_stack_adjust ();
 | 
      
         | 3439 |  |  |  
  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}


/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When the entry edge points to the first basic block, we don't
     need a jump; otherwise we have to jump to the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = gimple_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

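  /* Gather all insns emitted so far, skipping the initial note, into the
     new INIT_BLOCK placed right after the entry block.  */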
  init_block = create_basic_block (NEXT_INSN (get_insns ()),
                                   get_last_insn (),
                                   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}

/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx head = get_last_insn ();
  rtx end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);

  rtl_profile_for_bb (EXIT_BLOCK_PTR);

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  set_curr_insn_block (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  /* While emitting the function end we could move the end of the last
     basic block.  */
  BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
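  /* Advance HEAD past any notes so that they do not end up inside the
     new exit block.  */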
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
                                   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;

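  /* Redirect every non-abnormal edge that used to reach EXIT_BLOCK_PTR
     to the new exit block; abnormal edges are left in place.  */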
  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
        redirect_edge_succ (e, exit_block);
      else
        ix++;
    }

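  /* Create the fallthru edge into the real exit block and derive its
     count and frequency by subtracting the contributions of the
     remaining (abnormal) predecessor edges.  */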
  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
    if (e2 != e)
      {
        e->count -= e2->count;
        exit_block->count -= e2->count;
        exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (e->count < 0)
    e->count = 0;
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}

/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
                                   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
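      /* Strip wrappers and array/component references whose offsets are
         all invariant; what remains is either the base object or an
         ARRAY_REF with a non-constant index.  */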
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
              && is_gimple_min_invariant (TREE_OPERAND (t, 1))
              && (!TREE_OPERAND (t, 2)
                  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || (TREE_CODE (t) == COMPONENT_REF
                 && (!TREE_OPERAND (t, 2)
                     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || TREE_CODE (t) == BIT_FIELD_REF
             || TREE_CODE (t) == REALPART_EXPR
             || TREE_CODE (t) == IMAGPART_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || CONVERT_EXPR_P (t))
        t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          t = get_base_address (t);
          if (t && DECL_P (t)
              && DECL_MODE (t) != BLKmode)
            TREE_ADDRESSABLE (t) = 1;
        }

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = gsi_stmt (gsi);
        if (!is_gimple_debug (stmt))
          walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}

/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  The local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (flag_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
              <= crtl->stack_alignment_estimated);

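  /* Realignment is needed whenever the estimated alignment exceeds what
     the incoming stack is known to provide.  */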
  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
         needed.  */
      fixup_tail_calls ();
    }
}

/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate CFG during
   the expansion.  */

static unsigned int
gimple_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;
  unsigned i;

  rewrite_out_of_ssa (&SA);
  SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
                                           sizeof (rtx));

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

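  /* Attribute insns emitted from here on to the entry block's execution
     profile.  */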
  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  insn_locators_alloc ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (cfun->function_start_locus == UNKNOWN_LOCATION)
        set_curr_insn_source_location
          (DECL_SOURCE_LOCATION (current_function_decl));
      else
        set_curr_insn_source_location (cfun->function_start_locus);
    }
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

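  /* Let the target prepare for expansion, then reset the stack alignment
     bookkeeping before any insns are generated.  */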
  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = 0;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "not protecting local variables: variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "not protecting function: no buffer at least %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* Now that we also have the parameter RTXs, copy them over to our
     partitions.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
          && !SA.partition_to_pseudo[i])
        SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
         this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
        SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
         was a decl marked in REG_ATTR or MEM_ATTR.  We could use
         SET_DECL_RTL here making this available, but that would mean
         to select one of the potentially many RTLs for one DECL.  Instead
         of doing that we simply reset the MEM_EXPR of the RTL in question,
         then nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
        {
          if (MEM_P (SA.partition_to_pseudo[i]))
            set_mem_expr (SA.partition_to_pseudo[i], NULL);
        }
    }

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

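  /* Eliminate the remaining PHI nodes by inserting the equivalent copies
     on the incoming edges.  */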
  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

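  /* Discard the dominance information and the out-of-SSA bookkeeping;
     they are not needed from here on.  */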
  execute_free_datastructures ();
  finish_out_of_ssa (&SA);

  /* We are no longer in SSA form.  */
  cfun->gimple_df->in_ssa_p = false;

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (cfun, NULL);

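  /* Recompute the JUMP_LABEL fields and label reference counts for the
     freshly generated insn stream.  */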
  rebuild_jump_labels (get_insns ());

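  /* Materialize the insns that expansion queued on edges (e.g. the PHI
     node copies), splitting edges where necessary.  */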
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->insns.r)
            commit_one_edge_insertion (e);
          else
            ei_next (&ei);
        }
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
          e->flags &= ~EDGE_EXECUTABLE;

          /* At the moment not all abnormal edges match the RTL
             representation.  It is safe to remove them here as
             find_many_sub_basic_blocks will rediscover them.
             In the future we should get this fixed properly.  */
          if ((e->flags & EDGE_ABNORMAL)
              && !(e->flags & EDGE_SIBCALL))
            remove_edge (e);
          else
            ei_next (&ei);
        }
    }

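  /* Expansion may have created new jumps and labels inside what used to
     be single basic blocks; scan every block to discover the sub-blocks
     and fix up the CFG accordingly.  */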
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  compact_blocks ();

  expand_stack_alignment ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();
  return 0;
}

struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",                             /* name */
  NULL,                                 /* gate */
  gimple_expand_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx,                  /* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_ssa | PROP_trees,                /* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,                /* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect                    /* todo_flags_finish */
 }
};