openrisc/trunk/gnu-src/gcc-4.5.1/gcc/var-tracking.c (Subversion blame for rev 328)
/* Variable tracking routines for the GNU compiler.
   Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

/* This file contains the variable tracking pass.  It computes where
   variables are located (which registers or where in memory) at each position
   in the instruction stream and emits notes describing the locations.
   Debug information (DWARF2 location lists) is finally generated from
   these notes.
   With this debug information, it is possible to show variables
   even when debugging optimized code.

   How does the variable tracking pass work?

   First, it scans RTL code for uses, stores and clobbers (register/memory
   references in instructions), for call insns and for stack adjustments
   separately for each basic block and saves them to an array of micro
   operations.
   The micro operations of one instruction are ordered so that
   pre-modifying stack adjustment < use < use with no var < call insn
     < set < clobber < post-modifying stack adjustment

   Then, a forward dataflow analysis is performed to find out how locations
   of variables change through the code and to propagate the variable
   locations along the control flow graph.
   The IN set for basic block BB is computed as the union of the OUT sets of
   BB's predecessors; the OUT set for BB is copied from the IN set for BB and
   is changed according to the micro operations in BB.

   The IN and OUT sets for basic blocks consist of a current stack adjustment
   (used for adjusting the offset of variables addressed using the stack
   pointer), the table of structures describing the locations of parts of a
   variable, and, for each physical register, a linked list of the variable
   parts stored in that register.
   Each linked list is a list of triplets (reg, decl, offset) where decl is
   REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked list is used to
   delete the appropriate variable parts efficiently when we set or clobber
   the register.

   There may be more than one variable part in a register.  The linked lists
   should be pretty short, so a linked list is a good data structure here.
   For example, in the following code the register allocator may assign the
   same register to variables A and B, and both of them are stored in the
   same register in CODE:

     if (cond)
       set A;
     else
       set B;
     CODE;
     if (cond)
       use A;
     else
       use B;

   Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
   are emitted to the appropriate positions in the RTL code.  Each such note
   describes the location of one variable at the point in the instruction
   stream where the note is.  There is no need to emit a note for each
   variable before each instruction; we only emit these notes where the
   location of a variable changes (this means that we also emit notes for
   changes between the OUT set of the previous block and the IN set of the
   current block).

   The notes consist of two parts:
   1. the declaration (from REG_EXPR or MEM_EXPR)
   2. the location of a variable - it is either a simple register/memory
      reference (for simple variables, for example int),
      or a parallel of register/memory references (for large variables
      which consist of several parts, for example long long).

*/
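/* Illustrative sketch of the dataflow step described above (added for
   exposition; not part of the original source).  The forward analysis
   uses the usual equations

     IN[BB]  = union over each predecessor P of BB of OUT[P]
     OUT[BB] = result of applying BB's micro operations to IN[BB]

   which vt_find_locations iterates until nothing changes, using
   compute_bb_dataflow as the per-block transfer function.  */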
 
#include "config.h"
90
#include "system.h"
91
#include "coretypes.h"
92
#include "tm.h"
93
#include "rtl.h"
94
#include "tree.h"
95
#include "hard-reg-set.h"
96
#include "basic-block.h"
97
#include "flags.h"
98
#include "output.h"
99
#include "insn-config.h"
100
#include "reload.h"
101
#include "sbitmap.h"
102
#include "alloc-pool.h"
103
#include "fibheap.h"
104
#include "hashtab.h"
105
#include "regs.h"
106
#include "expr.h"
107
#include "timevar.h"
108
#include "tree-pass.h"
109
#include "tree-flow.h"
110
#include "cselib.h"
111
#include "target.h"
112
#include "toplev.h"
113
#include "params.h"
114
#include "diagnostic.h"
115
#include "pointer-set.h"
116
#include "recog.h"
117
 
118
/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
119
   has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
120
   Currently the value is the same as IDENTIFIER_NODE, which has such
121
   a property.  If this compile time assertion ever fails, make sure that
122
   the new tree code that equals (int) VALUE has the same property.  */
123
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
124
 
125
/* Type of micro operation.  */
enum micro_operation_type
{
  MO_USE,       /* Use location (REG or MEM).  */
  MO_USE_NO_VAR,/* Use location which is not associated with a variable
                   or the variable is not trackable.  */
  MO_VAL_USE,   /* Use location which is associated with a value.  */
  MO_VAL_LOC,   /* Use location which appears in a debug insn.  */
  MO_VAL_SET,   /* Set location associated with a value.  */
  MO_SET,       /* Set location.  */
  MO_COPY,      /* Copy the same portion of a variable from one
                   location to another.  */
  MO_CLOBBER,   /* Clobber location.  */
  MO_CALL,      /* Call insn.  */
  MO_ADJUST     /* Adjust stack pointer.  */
};
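/* Example ordering (illustration only, hypothetical insn): for a call insn
   that reads a register holding a tracked variable and clobbers the
   call-used registers, the recorded micro operations follow the ordering
   documented at the top of the file, i.e. the MO_USE comes before the
   MO_CALL, which comes before the MO_CLOBBERs.  */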
 
143
static const char * const ATTRIBUTE_UNUSED
144
micro_operation_type_name[] = {
145
  "MO_USE",
146
  "MO_USE_NO_VAR",
147
  "MO_VAL_USE",
148
  "MO_VAL_LOC",
149
  "MO_VAL_SET",
150
  "MO_SET",
151
  "MO_COPY",
152
  "MO_CLOBBER",
153
  "MO_CALL",
154
  "MO_ADJUST"
155
};
156
 
157
/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
158
   Notes emitted as AFTER_CALL are to take effect during the call,
159
   rather than after the call.  */
160
enum emit_note_where
161
{
162
  EMIT_NOTE_BEFORE_INSN,
163
  EMIT_NOTE_AFTER_INSN,
164
  EMIT_NOTE_AFTER_CALL_INSN
165
};
166
 
167
/* Structure holding information about micro operation.  */
168
typedef struct micro_operation_def
169
{
170
  /* Type of micro operation.  */
171
  enum micro_operation_type type;
172
 
173
  /* The instruction which the micro operation is in, for MO_USE,
174
     MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
175
     instruction or note in the original flow (before any var-tracking
176
     notes are inserted, to simplify emission of notes), for MO_SET
177
     and MO_CLOBBER.  */
178
  rtx insn;
179
 
180
  union {
181
    /* Location.  For MO_SET and MO_COPY, this is the SET that
182
       performs the assignment, if known, otherwise it is the target
183
       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
184
       CONCAT of the VALUE and the LOC associated with it.  For
185
       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
186
       associated with it.  */
187
    rtx loc;
188
 
189
    /* Stack adjustment.  */
190
    HOST_WIDE_INT adjust;
191
  } u;
192
} micro_operation;
193
 
194
DEF_VEC_O(micro_operation);
195
DEF_VEC_ALLOC_O(micro_operation,heap);
196
 
197
/* A declaration of a variable, or an RTL value being handled like a
198
   declaration.  */
199
typedef void *decl_or_value;
200
 
201
/* Structure for passing some other parameters to function
202
   emit_note_insn_var_location.  */
203
typedef struct emit_note_data_def
204
{
205
  /* The instruction which the note will be emitted before/after.  */
206
  rtx insn;
207
 
208
  /* Where will the note be emitted (before/after insn)?  */
209
  enum emit_note_where where;
210
 
211
  /* The variables and values active at this point.  */
212
  htab_t vars;
213
} emit_note_data;
214
 
215
/* Description of the location of a part of a variable.  The content of a
   physical register is described by a chain of these structures.
   The chains are pretty short (usually 1 or 2 elements) and thus
   a chain is the best data structure here.  */
219
typedef struct attrs_def
220
{
221
  /* Pointer to next member of the list.  */
222
  struct attrs_def *next;
223
 
224
  /* The rtx of register.  */
225
  rtx loc;
226
 
227
  /* The declaration corresponding to LOC.  */
228
  decl_or_value dv;
229
 
230
  /* Offset from start of DECL.  */
231
  HOST_WIDE_INT offset;
232
} *attrs;
233
 
234
/* Structure holding a refcounted hash table.  If refcount > 1,
   it must first be unshared before being modified.  */
236
typedef struct shared_hash_def
237
{
238
  /* Reference count.  */
239
  int refcount;
240
 
241
  /* Actual hash table.  */
242
  htab_t htab;
243
} *shared_hash;
244
 
245
/* Structure holding the IN or OUT set for a basic block.  */
246
typedef struct dataflow_set_def
247
{
248
  /* Adjustment of stack offset.  */
249
  HOST_WIDE_INT stack_adjust;
250
 
251
  /* Attributes for registers (lists of attrs).  */
252
  attrs regs[FIRST_PSEUDO_REGISTER];
253
 
254
  /* Variable locations.  */
255
  shared_hash vars;
256
 
257
  /* Vars that are being traversed.  */
258
  shared_hash traversed_vars;
259
} dataflow_set;
260
 
261
/* The structure (one for each basic block) containing the information
262
   needed for variable tracking.  */
263
typedef struct variable_tracking_info_def
264
{
265
  /* The vector of micro operations.  */
266
  VEC(micro_operation, heap) *mos;
267
 
268
  /* The IN and OUT set for dataflow analysis.  */
269
  dataflow_set in;
270
  dataflow_set out;
271
 
272
  /* The permanent-in dataflow set for this block.  This is used to
273
     hold values for which we had to compute entry values.  ??? This
274
     should probably be dynamically allocated, to avoid using more
275
     memory in non-debug builds.  */
276
  dataflow_set *permp;
277
 
278
  /* Has the block been visited in DFS?  */
279
  bool visited;
280
 
281
  /* Has the block been flooded in VTA?  */
282
  bool flooded;
283
 
284
} *variable_tracking_info;
285
 
286
/* Structure for chaining the locations.  */
287
typedef struct location_chain_def
288
{
289
  /* Next element in the chain.  */
290
  struct location_chain_def *next;
291
 
292
  /* The location (REG, MEM or VALUE).  */
293
  rtx loc;
294
 
295
  /* The "value" stored in this location.  */
296
  rtx set_src;
297
 
298
  /* Initialized? */
299
  enum var_init_status init;
300
} *location_chain;
301
 
302
/* Structure describing one part of a variable.  */
303
typedef struct variable_part_def
304
{
305
  /* Chain of locations of the part.  */
306
  location_chain loc_chain;
307
 
308
  /* Location which was last emitted to location list.  */
309
  rtx cur_loc;
310
 
311
  /* The offset in the variable.  */
312
  HOST_WIDE_INT offset;
313
} variable_part;
314
 
315
/* Maximum number of location parts.  */
316
#define MAX_VAR_PARTS 16
317
 
318
/* Structure describing where the variable is located.  */
319
typedef struct variable_def
320
{
321
  /* The declaration of the variable, or an RTL value being handled
322
     like a declaration.  */
323
  decl_or_value dv;
324
 
325
  /* Reference count.  */
326
  int refcount;
327
 
328
  /* Number of variable parts.  */
329
  char n_var_parts;
330
 
331
  /* True if this variable changed (any of its) cur_loc fields
332
     during the current emit_notes_for_changes resp.
333
     emit_notes_for_differences call.  */
334
  bool cur_loc_changed;
335
 
336
  /* True if this variable_def struct is currently in the
337
     changed_variables hash table.  */
338
  bool in_changed_variables;
339
 
340
  /* The variable parts.  */
341
  variable_part var_part[1];
342
} *variable;
343
typedef const struct variable_def *const_variable;
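/* Note (added for exposition, not part of the original source): var_part is
   declared with a single element above, but variables are allocated from
   pools sized either for MAX_VAR_PARTS parts (var_pool) or for a single
   part (valvar_pool), so var_part behaves as a variable-length trailing
   array; see dv_pool below for how the pool is chosen.  */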
 
345
/* Structure for chaining backlinks from referenced VALUEs to
346
   DVs that are referencing them.  */
347
typedef struct value_chain_def
348
{
349
  /* Next value_chain entry.  */
350
  struct value_chain_def *next;
351
 
352
  /* The declaration of the variable, or an RTL value
353
     being handled like a declaration, whose var_parts[0].loc_chain
354
     references the VALUE owning this value_chain.  */
355
  decl_or_value dv;
356
 
357
  /* Reference count.  */
358
  int refcount;
359
} *value_chain;
360
typedef const struct value_chain_def *const_value_chain;
361
 
362
/* Pointer to the BB's information specific to variable tracking pass.  */
363
#define VTI(BB) ((variable_tracking_info) (BB)->aux)
364
 
365
/* Macro to access MEM_OFFSET as a HOST_WIDE_INT.  Evaluates MEM twice.  */
#define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0)
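/* Usage note (illustration only): because the macro argument is evaluated
   twice, avoid passing an expression with side effects; e.g. the
   hypothetical INT_MEM_OFFSET (mems[i++]) would increment i twice.  */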
 
368
/* Alloc pool for struct attrs_def.  */
369
static alloc_pool attrs_pool;
370
 
371
/* Alloc pool for struct variable_def with MAX_VAR_PARTS entries.  */
372
static alloc_pool var_pool;
373
 
374
/* Alloc pool for struct variable_def with a single var_part entry.  */
375
static alloc_pool valvar_pool;
376
 
377
/* Alloc pool for struct location_chain_def.  */
378
static alloc_pool loc_chain_pool;
379
 
380
/* Alloc pool for struct shared_hash_def.  */
381
static alloc_pool shared_hash_pool;
382
 
383
/* Alloc pool for struct value_chain_def.  */
384
static alloc_pool value_chain_pool;
385
 
386
/* Changed variables, notes will be emitted for them.  */
387
static htab_t changed_variables;
388
 
389
/* Links from VALUEs to DVs referencing them in their current loc_chains.  */
390
static htab_t value_chains;
391
 
392
/* Shall notes be emitted?  */
393
static bool emit_notes;
394
 
395
/* Empty shared hashtable.  */
396
static shared_hash empty_shared_hash;
397
 
398
/* Scratch register bitmap used by cselib_expand_value_rtx.  */
399
static bitmap scratch_regs = NULL;
400
 
401
/* Variable used to tell whether cselib_process_insn called our hook.  */
402
static bool cselib_hook_called;
403
 
404
/* Local function prototypes.  */
405
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
406
                                          HOST_WIDE_INT *);
407
static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
408
                                               HOST_WIDE_INT *);
409
static bool vt_stack_adjustments (void);
410
static rtx compute_cfa_pointer (HOST_WIDE_INT);
411
static hashval_t variable_htab_hash (const void *);
412
static int variable_htab_eq (const void *, const void *);
413
static void variable_htab_free (void *);
414
 
415
static void init_attrs_list_set (attrs *);
416
static void attrs_list_clear (attrs *);
417
static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
418
static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
419
static void attrs_list_copy (attrs *, attrs);
420
static void attrs_list_union (attrs *, attrs);
421
 
422
static void **unshare_variable (dataflow_set *set, void **slot, variable var,
423
                                enum var_init_status);
424
static void vars_copy (htab_t, htab_t);
425
static tree var_debug_decl (tree);
426
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
427
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
428
                                    enum var_init_status, rtx);
429
static void var_reg_delete (dataflow_set *, rtx, bool);
430
static void var_regno_delete (dataflow_set *, int);
431
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
432
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
433
                                    enum var_init_status, rtx);
434
static void var_mem_delete (dataflow_set *, rtx, bool);
435
 
436
static void dataflow_set_init (dataflow_set *);
437
static void dataflow_set_clear (dataflow_set *);
438
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
439
static int variable_union_info_cmp_pos (const void *, const void *);
440
static void dataflow_set_union (dataflow_set *, dataflow_set *);
441
static location_chain find_loc_in_1pdv (rtx, variable, htab_t);
442
static bool canon_value_cmp (rtx, rtx);
443
static int loc_cmp (rtx, rtx);
444
static bool variable_part_different_p (variable_part *, variable_part *);
445
static bool onepart_variable_different_p (variable, variable);
446
static bool variable_different_p (variable, variable);
447
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
448
static void dataflow_set_destroy (dataflow_set *);
449
 
450
static bool contains_symbol_ref (rtx);
451
static bool track_expr_p (tree, bool);
452
static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
453
static int add_uses (rtx *, void *);
454
static void add_uses_1 (rtx *, void *);
455
static void add_stores (rtx, const_rtx, void *);
456
static bool compute_bb_dataflow (basic_block);
457
static bool vt_find_locations (void);
458
 
459
static void dump_attrs_list (attrs);
460
static int dump_var_slot (void **, void *);
461
static void dump_var (variable);
462
static void dump_vars (htab_t);
463
static void dump_dataflow_set (dataflow_set *);
464
static void dump_dataflow_sets (void);
465
 
466
static void variable_was_changed (variable, dataflow_set *);
467
static void **set_slot_part (dataflow_set *, rtx, void **,
468
                             decl_or_value, HOST_WIDE_INT,
469
                             enum var_init_status, rtx);
470
static void set_variable_part (dataflow_set *, rtx,
471
                               decl_or_value, HOST_WIDE_INT,
472
                               enum var_init_status, rtx, enum insert_option);
473
static void **clobber_slot_part (dataflow_set *, rtx,
474
                                 void **, HOST_WIDE_INT, rtx);
475
static void clobber_variable_part (dataflow_set *, rtx,
476
                                   decl_or_value, HOST_WIDE_INT, rtx);
477
static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT);
478
static void delete_variable_part (dataflow_set *, rtx,
479
                                  decl_or_value, HOST_WIDE_INT);
480
static int emit_note_insn_var_location (void **, void *);
481
static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash);
482
static int emit_notes_for_differences_1 (void **, void *);
483
static int emit_notes_for_differences_2 (void **, void *);
484
static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *);
485
static void emit_notes_in_bb (basic_block, dataflow_set *);
486
static void vt_emit_notes (void);
487
 
488
static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
489
static void vt_add_function_parameters (void);
490
static bool vt_initialize (void);
491
static void vt_finalize (void);
492
 
493
/* Given a SET, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying the stack pointer.
   This function is similar to stack_adjust_offset.  */
496
 
497
static void
498
stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
499
                              HOST_WIDE_INT *post)
500
{
501
  rtx src = SET_SRC (pattern);
502
  rtx dest = SET_DEST (pattern);
503
  enum rtx_code code;
504
 
505
  if (dest == stack_pointer_rtx)
506
    {
507
      /* (set (reg sp) (plus (reg sp) (const_int))) */
508
      code = GET_CODE (src);
509
      if (! (code == PLUS || code == MINUS)
510
          || XEXP (src, 0) != stack_pointer_rtx
511
          || !CONST_INT_P (XEXP (src, 1)))
512
        return;
513
 
514
      if (code == MINUS)
515
        *post += INTVAL (XEXP (src, 1));
516
      else
517
        *post -= INTVAL (XEXP (src, 1));
518
    }
519
  else if (MEM_P (dest))
520
    {
521
      /* (set (mem (pre_dec (reg sp))) (foo)) */
522
      src = XEXP (dest, 0);
523
      code = GET_CODE (src);
524
 
525
      switch (code)
526
        {
527
        case PRE_MODIFY:
528
        case POST_MODIFY:
529
          if (XEXP (src, 0) == stack_pointer_rtx)
530
            {
531
              rtx val = XEXP (XEXP (src, 1), 1);
532
              /* We handle only adjustments by constant amount.  */
533
              gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
534
                          CONST_INT_P (val));
535
 
536
              if (code == PRE_MODIFY)
537
                *pre -= INTVAL (val);
538
              else
539
                *post -= INTVAL (val);
540
              break;
541
            }
542
          return;
543
 
544
        case PRE_DEC:
545
          if (XEXP (src, 0) == stack_pointer_rtx)
546
            {
547
              *pre += GET_MODE_SIZE (GET_MODE (dest));
548
              break;
549
            }
550
          return;
551
 
552
        case POST_DEC:
553
          if (XEXP (src, 0) == stack_pointer_rtx)
554
            {
555
              *post += GET_MODE_SIZE (GET_MODE (dest));
556
              break;
557
            }
558
          return;
559
 
560
        case PRE_INC:
561
          if (XEXP (src, 0) == stack_pointer_rtx)
562
            {
563
              *pre -= GET_MODE_SIZE (GET_MODE (dest));
564
              break;
565
            }
566
          return;
567
 
568
        case POST_INC:
569
          if (XEXP (src, 0) == stack_pointer_rtx)
570
            {
571
              *post -= GET_MODE_SIZE (GET_MODE (dest));
572
              break;
573
            }
574
          return;
575
 
576
        default:
577
          return;
578
        }
579
    }
580
}
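/* Worked example (illustration only, hypothetical patterns): for
   (set (reg sp) (plus (reg sp) (const_int -16))) the PLUS branch above does
   *post -= -16, recording a post-modifying adjustment of +16; for a push
   through (set (mem:SI (pre_dec (reg sp))) (reg x)) the PRE_DEC case adds
   GET_MODE_SIZE (SImode), i.e. 4, to *pre.  */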
 
582
/* Given an INSN, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying the stack pointer.  */
584
 
585
static void
586
insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre,
587
                                   HOST_WIDE_INT *post)
588
{
589
  rtx pattern;
590
 
591
  *pre = 0;
592
  *post = 0;
593
 
594
  pattern = PATTERN (insn);
595
  if (RTX_FRAME_RELATED_P (insn))
596
    {
597
      rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
598
      if (expr)
599
        pattern = XEXP (expr, 0);
600
    }
601
 
602
  if (GET_CODE (pattern) == SET)
603
    stack_adjust_offset_pre_post (pattern, pre, post);
604
  else if (GET_CODE (pattern) == PARALLEL
605
           || GET_CODE (pattern) == SEQUENCE)
606
    {
607
      int i;
608
 
609
      /* There may be stack adjustments inside compound insns.  Search
610
         for them.  */
611
      for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
612
        if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
613
          stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
614
    }
615
}
616
 
617
/* Compute stack adjustments for all blocks by traversing DFS tree.
618
   Return true when the adjustments on all incoming edges are consistent.
619
   Heavily borrowed from pre_and_rev_post_order_compute.  */
620
 
621
static bool
622
vt_stack_adjustments (void)
623
{
624
  edge_iterator *stack;
625
  int sp;
626
 
627
  /* Initialize entry block.  */
628
  VTI (ENTRY_BLOCK_PTR)->visited = true;
629
  VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
630
  VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
631
 
632
  /* Allocate stack for back-tracking up CFG.  */
633
  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
634
  sp = 0;
635
 
636
  /* Push the first edge on to the stack.  */
637
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
638
 
639
  while (sp)
640
    {
641
      edge_iterator ei;
642
      basic_block src;
643
      basic_block dest;
644
 
645
      /* Look at the edge on the top of the stack.  */
646
      ei = stack[sp - 1];
647
      src = ei_edge (ei)->src;
648
      dest = ei_edge (ei)->dest;
649
 
650
      /* Check if the edge destination has been visited yet.  */
651
      if (!VTI (dest)->visited)
652
        {
653
          rtx insn;
654
          HOST_WIDE_INT pre, post, offset;
655
          VTI (dest)->visited = true;
656
          VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
657
 
658
          if (dest != EXIT_BLOCK_PTR)
659
            for (insn = BB_HEAD (dest);
660
                 insn != NEXT_INSN (BB_END (dest));
661
                 insn = NEXT_INSN (insn))
662
              if (INSN_P (insn))
663
                {
664
                  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
665
                  offset += pre + post;
666
                }
667
 
668
          VTI (dest)->out.stack_adjust = offset;
669
 
670
          if (EDGE_COUNT (dest->succs) > 0)
671
            /* Since the DEST node has been visited for the first
672
               time, check its successors.  */
673
            stack[sp++] = ei_start (dest->succs);
674
        }
675
      else
676
        {
677
          /* Check whether the adjustments on the edges are the same.  */
678
          if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
679
            {
680
              free (stack);
681
              return false;
682
            }
683
 
684
          if (! ei_one_before_end_p (ei))
685
            /* Go to the next edge.  */
686
            ei_next (&stack[sp - 1]);
687
          else
688
            /* Return to previous level if there are no more edges.  */
689
            sp--;
690
        }
691
    }
692
 
693
  free (stack);
694
  return true;
695
}
696
 
697
/* Compute a CFA-based value for the stack pointer.  */
698
 
699
static rtx
700
compute_cfa_pointer (HOST_WIDE_INT adjustment)
701
{
702
  rtx cfa;
703
 
704
#ifdef FRAME_POINTER_CFA_OFFSET
705
  adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl);
706
  cfa = plus_constant (frame_pointer_rtx, adjustment);
707
#else
708
  adjustment -= ARG_POINTER_CFA_OFFSET (current_function_decl);
709
  cfa = plus_constant (arg_pointer_rtx, adjustment);
710
#endif
711
 
712
  return cfa;
713
}
714
 
715
/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
716
   or -1 if the replacement shouldn't be done.  */
717
static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
718
 
719
/* Data for adjust_mems callback.  */
720
 
721
struct adjust_mem_data
722
{
723
  bool store;
724
  enum machine_mode mem_mode;
725
  HOST_WIDE_INT stack_adjust;
726
  rtx side_effects;
727
};
728
 
729
/* Helper for adjust_mems.  Return 1 if *loc is unsuitable for
   transformation of wider mode arithmetic to a narrower mode,
   -1 if it is suitable and subexpressions shouldn't be
   traversed, and 0 if it is suitable and subexpressions should
   be traversed.  Called through for_each_rtx.  */
734
 
735
static int
736
use_narrower_mode_test (rtx *loc, void *data)
737
{
738
  rtx subreg = (rtx) data;
739
 
740
  if (CONSTANT_P (*loc))
741
    return -1;
742
  switch (GET_CODE (*loc))
743
    {
744
    case REG:
745
      if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0))
746
        return 1;
747
      return -1;
748
    case PLUS:
749
    case MINUS:
750
    case MULT:
751
      return 0;
752
    case ASHIFT:
753
      if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data))
754
        return 1;
755
      else
756
        return -1;
757
    default:
758
      return 1;
759
    }
760
}
761
 
762
/* Transform X into narrower mode MODE from wider mode WMODE.  */
763
 
764
static rtx
765
use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode)
766
{
767
  rtx op0, op1;
768
  if (CONSTANT_P (x))
769
    return lowpart_subreg (mode, x, wmode);
770
  switch (GET_CODE (x))
771
    {
772
    case REG:
773
      return lowpart_subreg (mode, x, wmode);
774
    case PLUS:
775
    case MINUS:
776
    case MULT:
777
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
778
      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
779
      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
780
    case ASHIFT:
781
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
782
      return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1));
783
    default:
784
      gcc_unreachable ();
785
    }
786
}
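/* Illustration only (assumed little-endian target): narrowing
   (plus:DI (reg:DI x) (const_int 8)) to SImode with use_narrower_mode
   yields (plus:SI (subreg:SI (reg:DI x) 0) (const_int 8)); the REG is
   replaced by its lowpart SUBREG, the constant is narrowed by
   lowpart_subreg, and the PLUS is rebuilt in SImode via
   simplify_gen_binary.  */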
 
788
/* Helper function for adjusting used MEMs.  */
789
 
790
static rtx
791
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
792
{
793
  struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
794
  rtx mem, addr = loc, tem;
795
  enum machine_mode mem_mode_save;
796
  bool store_save;
797
  switch (GET_CODE (loc))
798
    {
799
    case REG:
800
      /* Don't do any sp or fp replacements outside of MEM addresses
801
         on the LHS.  */
802
      if (amd->mem_mode == VOIDmode && amd->store)
803
        return loc;
804
      if (loc == stack_pointer_rtx
805
          && !frame_pointer_needed)
806
        return compute_cfa_pointer (amd->stack_adjust);
807
      else if (loc == hard_frame_pointer_rtx
808
               && frame_pointer_needed
809
               && hard_frame_pointer_adjustment != -1)
810
        return compute_cfa_pointer (hard_frame_pointer_adjustment);
811
      return loc;
812
    case MEM:
813
      mem = loc;
814
      if (!amd->store)
815
        {
816
          mem = targetm.delegitimize_address (mem);
817
          if (mem != loc && !MEM_P (mem))
818
            return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
819
        }
820
 
821
      addr = XEXP (mem, 0);
822
      mem_mode_save = amd->mem_mode;
823
      amd->mem_mode = GET_MODE (mem);
824
      store_save = amd->store;
825
      amd->store = false;
826
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
827
      amd->store = store_save;
828
      amd->mem_mode = mem_mode_save;
829
      if (mem == loc)
830
        addr = targetm.delegitimize_address (addr);
831
      if (addr != XEXP (mem, 0))
832
        mem = replace_equiv_address_nv (mem, addr);
833
      if (!amd->store)
834
        mem = avoid_constant_pool_reference (mem);
835
      return mem;
836
    case PRE_INC:
837
    case PRE_DEC:
838
      addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
839
                           GEN_INT (GET_CODE (loc) == PRE_INC
840
                                    ? GET_MODE_SIZE (amd->mem_mode)
841
                                    : -GET_MODE_SIZE (amd->mem_mode)));
842
    case POST_INC:
843
    case POST_DEC:
844
      if (addr == loc)
845
        addr = XEXP (loc, 0);
846
      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
847
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
848
      tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
849
                           GEN_INT ((GET_CODE (loc) == PRE_INC
850
                                     || GET_CODE (loc) == POST_INC)
851
                                    ? GET_MODE_SIZE (amd->mem_mode)
852
                                    : -GET_MODE_SIZE (amd->mem_mode)));
853
      amd->side_effects = alloc_EXPR_LIST (0,
854
                                           gen_rtx_SET (VOIDmode,
855
                                                        XEXP (loc, 0),
856
                                                        tem),
857
                                           amd->side_effects);
858
      return addr;
859
    case PRE_MODIFY:
860
      addr = XEXP (loc, 1);
861
    case POST_MODIFY:
862
      if (addr == loc)
863
        addr = XEXP (loc, 0);
864
      gcc_assert (amd->mem_mode != VOIDmode);
865
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
866
      amd->side_effects = alloc_EXPR_LIST (0,
867
                                           gen_rtx_SET (VOIDmode,
868
                                                        XEXP (loc, 0),
869
                                                        XEXP (loc, 1)),
870
                                           amd->side_effects);
871
      return addr;
872
    case SUBREG:
873
      /* First try without delegitimization of whole MEMs and
874
         avoid_constant_pool_reference, which is more likely to succeed.  */
875
      store_save = amd->store;
876
      amd->store = true;
877
      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
878
                                      data);
879
      amd->store = store_save;
880
      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
881
      if (mem == SUBREG_REG (loc))
882
        {
883
          tem = loc;
884
          goto finish_subreg;
885
        }
886
      tem = simplify_gen_subreg (GET_MODE (loc), mem,
887
                                 GET_MODE (SUBREG_REG (loc)),
888
                                 SUBREG_BYTE (loc));
889
      if (tem)
890
        goto finish_subreg;
891
      tem = simplify_gen_subreg (GET_MODE (loc), addr,
892
                                 GET_MODE (SUBREG_REG (loc)),
893
                                 SUBREG_BYTE (loc));
894
      if (tem == NULL_RTX)
895
        tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
896
    finish_subreg:
897
      if (MAY_HAVE_DEBUG_INSNS
898
          && GET_CODE (tem) == SUBREG
899
          && (GET_CODE (SUBREG_REG (tem)) == PLUS
900
              || GET_CODE (SUBREG_REG (tem)) == MINUS
901
              || GET_CODE (SUBREG_REG (tem)) == MULT
902
              || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
903
          && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
904
          && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
905
          && GET_MODE_SIZE (GET_MODE (tem))
906
             < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem)))
907
          && subreg_lowpart_p (tem)
908
          && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem))
909
        return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
910
                                  GET_MODE (SUBREG_REG (tem)));
911
      return tem;
912
    case ASM_OPERANDS:
913
      /* Don't do any replacements in second and following
914
         ASM_OPERANDS of inline-asm with multiple sets.
915
         ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
916
         and ASM_OPERANDS_LABEL_VEC need to be equal between
917
         all the ASM_OPERANDs in the insn and adjust_insn will
918
         fix this up.  */
919
      if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
920
        return loc;
921
      break;
922
    default:
923
      break;
924
    }
925
  return NULL_RTX;
926
}
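/* Sketch of the effect on auto-modified addresses (illustration only,
   hypothetical use): a read from (mem:SI (pre_dec (reg sp))) is rewritten
   above so that the address becomes the explicit (plus (reg sp)
   (const_int -4)), while (set (reg sp) (plus (reg sp) (const_int -4))) is
   queued on amd->side_effects; adjust_insn later appends the queued sets
   to the insn's PARALLEL.  */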
 
928
/* Helper function for replacement of uses.  */
929
 
930
static void
931
adjust_mem_uses (rtx *x, void *data)
932
{
933
  rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
934
  if (new_x != *x)
935
    validate_change (NULL_RTX, x, new_x, true);
936
}
937
 
938
/* Helper function for replacement of stores.  */
939
 
940
static void
941
adjust_mem_stores (rtx loc, const_rtx expr, void *data)
942
{
943
  if (MEM_P (loc))
944
    {
945
      rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
946
                                              adjust_mems, data);
947
      if (new_dest != SET_DEST (expr))
948
        {
949
          rtx xexpr = CONST_CAST_RTX (expr);
950
          validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
951
        }
952
    }
953
}
954
 
955
/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
956
   replace them with their value in the insn and add the side-effects
957
   as other sets to the insn.  */
958
 
959
static void
960
adjust_insn (basic_block bb, rtx insn)
961
{
962
  struct adjust_mem_data amd;
963
  rtx set;
964
  amd.mem_mode = VOIDmode;
965
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;
966
  amd.side_effects = NULL_RTX;
967
 
968
  amd.store = true;
969
  note_stores (PATTERN (insn), adjust_mem_stores, &amd);
970
 
971
  amd.store = false;
972
  if (GET_CODE (PATTERN (insn)) == PARALLEL
973
      && asm_noperands (PATTERN (insn)) > 0
974
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
975
    {
976
      rtx body, set0;
977
      int i;
978
 
979
      /* inline-asm with multiple sets is a tiny bit more complicated,
         because the 3 vectors in ASM_OPERANDS need to be shared between
         all ASM_OPERANDS in the instruction.  adjust_mems will
         not touch ASM_OPERANDS other than the first one; the asm_noperands
         test above needs to be called before that (otherwise it would fail)
         and afterwards this code fixes it up.  */
985
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
986
      body = PATTERN (insn);
987
      set0 = XVECEXP (body, 0, 0);
988
#ifdef ENABLE_CHECKING
989
      gcc_assert (GET_CODE (set0) == SET
990
                  && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
991
                  && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
992
#endif
993
      for (i = 1; i < XVECLEN (body, 0); i++)
994
        if (GET_CODE (XVECEXP (body, 0, i)) != SET)
995
          break;
996
        else
997
          {
998
            set = XVECEXP (body, 0, i);
999
#ifdef ENABLE_CHECKING
1000
            gcc_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
1001
                        && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set)) == i);
1002
#endif
1003
            if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
1004
                != ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
1005
                || ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
1006
                   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
1007
                || ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
1008
                   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
1009
              {
1010
                rtx newsrc = shallow_copy_rtx (SET_SRC (set));
1011
                ASM_OPERANDS_INPUT_VEC (newsrc)
1012
                  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
1013
                ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
1014
                  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
1015
                ASM_OPERANDS_LABEL_VEC (newsrc)
1016
                  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
1017
                validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
1018
              }
1019
          }
1020
    }
1021
  else
1022
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
1023
 
1024
  /* For read-only MEMs containing some constant, prefer those
1025
     constants.  */
1026
  set = single_set (insn);
1027
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
1028
    {
1029
      rtx note = find_reg_equal_equiv_note (insn);
1030
 
1031
      if (note && CONSTANT_P (XEXP (note, 0)))
1032
        validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
1033
    }
1034
 
1035
  if (amd.side_effects)
1036
    {
1037
      rtx *pat, new_pat, s;
1038
      int i, oldn, newn;
1039
 
1040
      pat = &PATTERN (insn);
1041
      if (GET_CODE (*pat) == COND_EXEC)
1042
        pat = &COND_EXEC_CODE (*pat);
1043
      if (GET_CODE (*pat) == PARALLEL)
1044
        oldn = XVECLEN (*pat, 0);
1045
      else
1046
        oldn = 1;
1047
      for (s = amd.side_effects, newn = 0; s; newn++)
1048
        s = XEXP (s, 1);
1049
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
1050
      if (GET_CODE (*pat) == PARALLEL)
1051
        for (i = 0; i < oldn; i++)
1052
          XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
1053
      else
1054
        XVECEXP (new_pat, 0, 0) = *pat;
1055
      for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
1056
        XVECEXP (new_pat, 0, i) = XEXP (s, 0);
1057
      free_EXPR_LIST_list (&amd.side_effects);
1058
      validate_change (NULL_RTX, pat, new_pat, true);
1059
    }
1060
}
1061
 
1062
/* Return true if a decl_or_value DV is a DECL or NULL.  */
1063
static inline bool
1064
dv_is_decl_p (decl_or_value dv)
1065
{
1066
  return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
1067
}
1068
 
1069
/* Return true if a decl_or_value is a VALUE rtl.  */
1070
static inline bool
1071
dv_is_value_p (decl_or_value dv)
1072
{
1073
  return dv && !dv_is_decl_p (dv);
1074
}
1075
 
1076
/* Return the decl in the decl_or_value.  */
1077
static inline tree
1078
dv_as_decl (decl_or_value dv)
1079
{
1080
#ifdef ENABLE_CHECKING
1081
  gcc_assert (dv_is_decl_p (dv));
1082
#endif
1083
  return (tree) dv;
1084
}
1085
 
1086
/* Return the value in the decl_or_value.  */
1087
static inline rtx
1088
dv_as_value (decl_or_value dv)
1089
{
1090
#ifdef ENABLE_CHECKING
1091
  gcc_assert (dv_is_value_p (dv));
1092
#endif
1093
  return (rtx)dv;
1094
}
1095
 
1096
/* Return the opaque pointer in the decl_or_value.  */
1097
static inline void *
1098
dv_as_opaque (decl_or_value dv)
1099
{
1100
  return dv;
1101
}
1102
 
1103
/* Return true if a decl_or_value must not have more than one variable
1104
   part.  */
1105
static inline bool
1106
dv_onepart_p (decl_or_value dv)
1107
{
1108
  tree decl;
1109
 
1110
  if (!MAY_HAVE_DEBUG_INSNS)
1111
    return false;
1112
 
1113
  if (dv_is_value_p (dv))
1114
    return true;
1115
 
1116
  decl = dv_as_decl (dv);
1117
 
1118
  if (!decl)
1119
    return true;
1120
 
1121
  if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
1122
    return true;
1123
 
1124
  return (target_for_debug_bind (decl) != NULL_TREE);
1125
}
1126
 
1127
/* Return the variable pool to be used for dv, depending on whether it
1128
   can have multiple parts or not.  */
1129
static inline alloc_pool
1130
dv_pool (decl_or_value dv)
1131
{
1132
  return dv_onepart_p (dv) ? valvar_pool : var_pool;
1133
}
1134
 
1135
/* Build a decl_or_value out of a decl.  */
1136
static inline decl_or_value
1137
dv_from_decl (tree decl)
1138
{
1139
  decl_or_value dv;
1140
  dv = decl;
1141
#ifdef ENABLE_CHECKING
1142
  gcc_assert (dv_is_decl_p (dv));
1143
#endif
1144
  return dv;
1145
}
1146
 
1147
/* Build a decl_or_value out of a value.  */
1148
static inline decl_or_value
1149
dv_from_value (rtx value)
1150
{
1151
  decl_or_value dv;
1152
  dv = value;
1153
#ifdef ENABLE_CHECKING
1154
  gcc_assert (dv_is_value_p (dv));
1155
#endif
1156
  return dv;
1157
}
1158
 
1159
extern void debug_dv (decl_or_value dv);
1160
 
1161
void
1162
debug_dv (decl_or_value dv)
1163
{
1164
  if (dv_is_value_p (dv))
1165
    debug_rtx (dv_as_value (dv));
1166
  else
1167
    debug_generic_stmt (dv_as_decl (dv));
1168
}
1169
 
1170
typedef unsigned int dvuid;
1171
 
1172
/* Return the uid of DV.  */
1173
 
1174
static inline dvuid
1175
dv_uid (decl_or_value dv)
1176
{
1177
  if (dv_is_value_p (dv))
1178
    return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
1179
  else
1180
    return DECL_UID (dv_as_decl (dv));
1181
}
1182
 
1183
/* Compute the hash from the uid.  */
1184
 
1185
static inline hashval_t
1186
dv_uid2hash (dvuid uid)
1187
{
1188
  return uid;
1189
}
1190
 
1191
/* The hash function for a mask table in a shared_htab chain.  */
1192
 
1193
static inline hashval_t
1194
dv_htab_hash (decl_or_value dv)
1195
{
1196
  return dv_uid2hash (dv_uid (dv));
1197
}
1198
 
1199
/* The hash function for variable_htab, computes the hash value
1200
   from the declaration of variable X.  */
1201
 
1202
static hashval_t
1203
variable_htab_hash (const void *x)
1204
{
1205
  const_variable const v = (const_variable) x;
1206
 
1207
  return dv_htab_hash (v->dv);
1208
}
1209
 
1210
/* Compare the declaration of variable X with declaration Y.  */
1211
 
1212
static int
1213
variable_htab_eq (const void *x, const void *y)
1214
{
1215
  const_variable const v = (const_variable) x;
1216
  decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1217
 
1218
  return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
1219
}
1220
 
1221
/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */
1222
 
1223
static void
1224
variable_htab_free (void *elem)
1225
{
1226
  int i;
1227
  variable var = (variable) elem;
1228
  location_chain node, next;
1229
 
1230
  gcc_assert (var->refcount > 0);
1231
 
1232
  var->refcount--;
1233
  if (var->refcount > 0)
1234
    return;
1235
 
1236
  for (i = 0; i < var->n_var_parts; i++)
1237
    {
1238
      for (node = var->var_part[i].loc_chain; node; node = next)
1239
        {
1240
          next = node->next;
1241
          pool_free (loc_chain_pool, node);
1242
        }
1243
      var->var_part[i].loc_chain = NULL;
1244
    }
1245
  pool_free (dv_pool (var->dv), var);
1246
}
1247
 
1248
/* The hash function for value_chains htab, computes the hash value
1249
   from the VALUE.  */
1250
 
1251
static hashval_t
1252
value_chain_htab_hash (const void *x)
1253
{
1254
  const_value_chain const v = (const_value_chain) x;
1255
 
1256
  return dv_htab_hash (v->dv);
1257
}
1258
 
1259
/* Compare the VALUE X with VALUE Y.  */
1260
 
1261
static int
1262
value_chain_htab_eq (const void *x, const void *y)
1263
{
1264
  const_value_chain const v = (const_value_chain) x;
1265
  decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
1266
 
1267
  return dv_as_opaque (v->dv) == dv_as_opaque (dv);
1268
}
1269
 
1270
/* Initialize the set (array) SET of attrs to empty lists.  */
1271
 
1272
static void
1273
init_attrs_list_set (attrs *set)
1274
{
1275
  int i;
1276
 
1277
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1278
    set[i] = NULL;
1279
}
1280
 
1281
/* Make the list *LISTP empty.  */
1282
 
1283
static void
1284
attrs_list_clear (attrs *listp)
1285
{
1286
  attrs list, next;
1287
 
1288
  for (list = *listp; list; list = next)
1289
    {
1290
      next = list->next;
1291
      pool_free (attrs_pool, list);
1292
    }
1293
  *listp = NULL;
1294
}
1295
 
1296
/* Return the member of LIST whose dv and offset equal DV and OFFSET, or
   NULL if there is no such member.  */
1297
 
1298
static attrs
1299
attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
1300
{
1301
  for (; list; list = list->next)
1302
    if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
1303
      return list;
1304
  return NULL;
1305
}
1306
 
1307
/* Insert the triplet DECL, OFFSET, LOC to the list *LISTP.  */
1308
 
1309
static void
1310
attrs_list_insert (attrs *listp, decl_or_value dv,
1311
                   HOST_WIDE_INT offset, rtx loc)
1312
{
1313
  attrs list;
1314
 
1315
  list = (attrs) pool_alloc (attrs_pool);
1316
  list->loc = loc;
1317
  list->dv = dv;
1318
  list->offset = offset;
1319
  list->next = *listp;
1320
  *listp = list;
1321
}
1322
 
1323
/* Copy all nodes from SRC and create a list *DSTP of the copies.  */
1324
 
1325
static void
1326
attrs_list_copy (attrs *dstp, attrs src)
1327
{
1328
  attrs n;
1329
 
1330
  attrs_list_clear (dstp);
1331
  for (; src; src = src->next)
1332
    {
1333
      n = (attrs) pool_alloc (attrs_pool);
1334
      n->loc = src->loc;
1335
      n->dv = src->dv;
1336
      n->offset = src->offset;
1337
      n->next = *dstp;
1338
      *dstp = n;
1339
    }
1340
}
1341
 
1342
/* Add all nodes from SRC which are not in *DSTP to *DSTP.  */
1343
 
1344
static void
1345
attrs_list_union (attrs *dstp, attrs src)
1346
{
1347
  for (; src; src = src->next)
1348
    {
1349
      if (!attrs_list_member (*dstp, src->dv, src->offset))
1350
        attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1351
    }
1352
}
1353
 
1354
/* Combine nodes that are not onepart nodes from SRC and SRC2 into
1355
   *DSTP.  */
1356
 
1357
static void
1358
attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
1359
{
1360
  gcc_assert (!*dstp);
1361
  for (; src; src = src->next)
1362
    {
1363
      if (!dv_onepart_p (src->dv))
1364
        attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1365
    }
1366
  for (src = src2; src; src = src->next)
1367
    {
1368
      if (!dv_onepart_p (src->dv)
1369
          && !attrs_list_member (*dstp, src->dv, src->offset))
1370
        attrs_list_insert (dstp, src->dv, src->offset, src->loc);
1371
    }
1372
}
1373
 
1374
/* Shared hashtable support.  */
1375
 
1376
/* Return true if VARS is shared.  */
1377
 
1378
static inline bool
1379
shared_hash_shared (shared_hash vars)
1380
{
1381
  return vars->refcount > 1;
1382
}
1383
 
1384
/* Return the hash table for VARS.  */
1385
 
1386
static inline htab_t
1387
shared_hash_htab (shared_hash vars)
1388
{
1389
  return vars->htab;
1390
}
1391
 
1392
/* Return true if VAR is shared, possibly because VARS is shared.  */
1393
 
1394
static inline bool
1395
shared_var_p (variable var, shared_hash vars)
1396
{
1397
  /* Don't count an entry in the changed_variables table as a duplicate.  */
1398
  return ((var->refcount > 1 + (int) var->in_changed_variables)
1399
          || shared_hash_shared (vars));
1400
}
1401
 
1402
/* Copy variables into a new hash table.  */
1403
 
1404
static shared_hash
1405
shared_hash_unshare (shared_hash vars)
1406
{
1407
  shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
1408
  gcc_assert (vars->refcount > 1);
1409
  new_vars->refcount = 1;
1410
  new_vars->htab
1411
    = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash,
1412
                   variable_htab_eq, variable_htab_free);
1413
  vars_copy (new_vars->htab, vars->htab);
1414
  vars->refcount--;
1415
  return new_vars;
1416
}
1417
 
1418
/* Increment reference counter on VARS and return it.  */
1419
 
1420
static inline shared_hash
1421
shared_hash_copy (shared_hash vars)
1422
{
1423
  vars->refcount++;
1424
  return vars;
1425
}
1426
 
1427
/* Decrement reference counter and destroy hash table if not shared
1428
   anymore.  */
1429
 
1430
static void
1431
shared_hash_destroy (shared_hash vars)
1432
{
1433
  gcc_assert (vars->refcount > 0);
1434
  if (--vars->refcount == 0)
1435
    {
1436
      htab_delete (vars->htab);
1437
      pool_free (shared_hash_pool, vars);
1438
    }
1439
}
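/* Usage sketch of the copy-on-write discipline (added for exposition, not
   part of the original source): copying a dataflow set merely bumps the
   refcount via shared_hash_copy; the first time a shared table has to be
   modified, shared_hash_find_slot_unshare makes a private copy through
   shared_hash_unshare; shared_hash_destroy frees the table only when the
   last reference is dropped.  */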
 
1441
/* Unshare *PVARS if shared and return slot for DV.  If INS is
1442
   INSERT, insert it if not already present.  */
1443
 
1444
static inline void **
1445
shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
1446
                                 hashval_t dvhash, enum insert_option ins)
1447
{
1448
  if (shared_hash_shared (*pvars))
1449
    *pvars = shared_hash_unshare (*pvars);
1450
  return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins);
1451
}
1452
 
1453
static inline void **
1454
shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
1455
                               enum insert_option ins)
1456
{
1457
  return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
1458
}
1459
 
1460
/* Return slot for DV, if it is already present in the hash table.
   If it is not present, insert it only if VARS is not shared, otherwise
   return NULL.  */
1463
 
1464
static inline void **
1465
shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1466
{
1467
  return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1468
                                   shared_hash_shared (vars)
1469
                                   ? NO_INSERT : INSERT);
1470
}
1471
 
1472
static inline void **
1473
shared_hash_find_slot (shared_hash vars, decl_or_value dv)
1474
{
1475
  return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
1476
}
1477
 
1478
/* Return slot for DV only if it is already present in the hash table.  */
1479
 
1480
static inline void **
1481
shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
1482
                                  hashval_t dvhash)
1483
{
1484
  return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash,
1485
                                   NO_INSERT);
1486
}
1487
 
1488
static inline void **
1489
shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
1490
{
1491
  return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
1492
}
1493
 
1494
/* Return variable for DV or NULL if not already present in the hash
1495
   table.  */
1496
 
1497
static inline variable
1498
shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
1499
{
1500
  return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash);
1501
}
1502
 
1503
static inline variable
1504
shared_hash_find (shared_hash vars, decl_or_value dv)
1505
{
1506
  return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
1507
}
1508
 
1509
/* Return true if TVAL is better than CVAL as a canonical value.  We
   choose lowest-numbered VALUEs, using the RTX address as a
   tie-breaker.  The idea is to arrange them into a star topology,
   such that all of them are at most one step away from the canonical
   value, and the canonical value has backlinks to all of them, in
   addition to all the actual locations.  We don't enforce this
   topology throughout the entire dataflow analysis, though.  */
1517
 
1518
static inline bool
1519
canon_value_cmp (rtx tval, rtx cval)
1520
{
1521
  return !cval
1522
    || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
1523
}
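/* Illustration only: among equivalent VALUEs with uids 7, 11 and 19, uid 7
   wins canon_value_cmp against the others, so it becomes the center of the
   star described above; the VALUEs with uids 11 and 19 are kept at most one
   step away from it, while the canonical value keeps backlinks to them.  */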
 
1525
static bool dst_can_be_shared;
1526
 
1527
/* Make a copy of variable VAR, insert it into dataflow set SET and return
   the slot for it.  */
1528
 
1529
static void **
1530
unshare_variable (dataflow_set *set, void **slot, variable var,
1531
                  enum var_init_status initialized)
1532
{
1533
  variable new_var;
1534
  int i;
1535
 
1536
  new_var = (variable) pool_alloc (dv_pool (var->dv));
1537
  new_var->dv = var->dv;
1538
  new_var->refcount = 1;
1539
  var->refcount--;
1540
  new_var->n_var_parts = var->n_var_parts;
1541
  new_var->cur_loc_changed = var->cur_loc_changed;
1542
  var->cur_loc_changed = false;
1543
  new_var->in_changed_variables = false;
1544
 
1545
  if (! flag_var_tracking_uninit)
1546
    initialized = VAR_INIT_STATUS_INITIALIZED;
1547
 
1548
  for (i = 0; i < var->n_var_parts; i++)
1549
    {
1550
      location_chain node;
1551
      location_chain *nextp;
1552
 
1553
      new_var->var_part[i].offset = var->var_part[i].offset;
1554
      nextp = &new_var->var_part[i].loc_chain;
1555
      for (node = var->var_part[i].loc_chain; node; node = node->next)
1556
        {
1557
          location_chain new_lc;
1558
 
1559
          new_lc = (location_chain) pool_alloc (loc_chain_pool);
1560
          new_lc->next = NULL;
1561
          if (node->init > initialized)
1562
            new_lc->init = node->init;
1563
          else
1564
            new_lc->init = initialized;
1565
          if (node->set_src && !(MEM_P (node->set_src)))
1566
            new_lc->set_src = node->set_src;
1567
          else
1568
            new_lc->set_src = NULL;
1569
          new_lc->loc = node->loc;
1570
 
1571
          *nextp = new_lc;
1572
          nextp = &new_lc->next;
1573
        }
1574
 
1575
      new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1576
    }
1577
 
1578
  dst_can_be_shared = false;
1579
  if (shared_hash_shared (set->vars))
1580
    slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1581
  else if (set->traversed_vars && set->vars != set->traversed_vars)
1582
    slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1583
  *slot = new_var;
1584
  if (var->in_changed_variables)
1585
    {
1586
      void **cslot
1587
        = htab_find_slot_with_hash (changed_variables, var->dv,
1588
                                    dv_htab_hash (var->dv), NO_INSERT);
1589
      gcc_assert (*cslot == (void *) var);
1590
      var->in_changed_variables = false;
1591
      variable_htab_free (var);
1592
      *cslot = new_var;
1593
      new_var->in_changed_variables = true;
1594
    }
1595
  return slot;
1596
}
1597
 
1598
/* Copy all variables from hash table SRC to hash table DST.  */
1599
 
1600
static void
1601
vars_copy (htab_t dst, htab_t src)
1602
{
1603
  htab_iterator hi;
1604
  variable var;
1605
 
1606
  FOR_EACH_HTAB_ELEMENT (src, var, variable, hi)
1607
    {
1608
      void **dstp;
1609
      var->refcount++;
1610
      dstp = htab_find_slot_with_hash (dst, var->dv,
1611
                                       dv_htab_hash (var->dv),
1612
                                       INSERT);
1613
      *dstp = var;
1614
    }
1615
}
1616
 
1617
/* Map a decl to its main debug decl.  */
1618
 
1619
static inline tree
1620
var_debug_decl (tree decl)
1621
{
1622
  if (decl && DECL_P (decl)
1623
      && DECL_DEBUG_EXPR_IS_FROM (decl) && DECL_DEBUG_EXPR (decl)
1624
      && DECL_P (DECL_DEBUG_EXPR (decl)))
1625
    decl = DECL_DEBUG_EXPR (decl);
1626
 
1627
  return decl;
1628
}
1629
 
1630
/* Set the register LOC to contain DV, OFFSET.  */
1631
 
1632
static void
1633
var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1634
                  decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1635
                  enum insert_option iopt)
1636
{
1637
  attrs node;
1638
  bool decl_p = dv_is_decl_p (dv);
1639
 
1640
  if (decl_p)
1641
    dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1642
 
1643
  for (node = set->regs[REGNO (loc)]; node; node = node->next)
1644
    if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1645
        && node->offset == offset)
1646
      break;
1647
  if (!node)
1648
    attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1649
  set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1650
}
1651
 
1652
/* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  */
1653
 
1654
static void
1655
var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1656
             rtx set_src)
1657
{
1658
  tree decl = REG_EXPR (loc);
1659
  HOST_WIDE_INT offset = REG_OFFSET (loc);
1660
 
1661
  var_reg_decl_set (set, loc, initialized,
1662
                    dv_from_decl (decl), offset, set_src, INSERT);
1663
}
1664
 
1665
static enum var_init_status
1666
get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1667
{
1668
  variable var;
1669
  int i;
1670
  enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1671
 
1672
  if (! flag_var_tracking_uninit)
1673
    return VAR_INIT_STATUS_INITIALIZED;
1674
 
1675
  var = shared_hash_find (set->vars, dv);
1676
  if (var)
1677
    {
1678
      for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1679
        {
1680
          location_chain nextp;
1681
          for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1682
            if (rtx_equal_p (nextp->loc, loc))
1683
              {
1684
                ret_val = nextp->init;
1685
                break;
1686
              }
1687
        }
1688
    }
1689
 
1690
  return ret_val;
1691
}
1692
 
1693
/* Delete the current content of register LOC in dataflow set SET and set
1694
   the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  If
1695
   MODIFY is true, any other live copies of the same variable part are
1696
   also deleted from the dataflow set, otherwise the variable part is
1697
   assumed to be copied from another location holding the same
1698
   part.  */
1699
 
1700
static void
1701
var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1702
                        enum var_init_status initialized, rtx set_src)
1703
{
1704
  tree decl = REG_EXPR (loc);
1705
  HOST_WIDE_INT offset = REG_OFFSET (loc);
1706
  attrs node, next;
1707
  attrs *nextp;
1708
 
1709
  decl = var_debug_decl (decl);
1710
 
1711
  if (initialized == VAR_INIT_STATUS_UNKNOWN)
1712
    initialized = get_init_value (set, loc, dv_from_decl (decl));
1713
 
1714
  nextp = &set->regs[REGNO (loc)];
1715
  for (node = *nextp; node; node = next)
1716
    {
1717
      next = node->next;
1718
      if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1719
        {
1720
          delete_variable_part (set, node->loc, node->dv, node->offset);
1721
          pool_free (attrs_pool, node);
1722
          *nextp = next;
1723
        }
1724
      else
1725
        {
1726
          node->loc = loc;
1727
          nextp = &node->next;
1728
        }
1729
    }
1730
  if (modify)
1731
    clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1732
  var_reg_set (set, loc, initialized, set_src);
1733
}
1734
 
1735
/* Delete the association of register LOC in dataflow set SET with any
1736
   variables that aren't onepart.  If CLOBBER is true, also delete any
1737
   other live copies of the same variable part, and delete the
1738
   association with onepart dvs too.  */
1739
 
1740
static void
1741
var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1742
{
1743
  attrs *nextp = &set->regs[REGNO (loc)];
1744
  attrs node, next;
1745
 
1746
  if (clobber)
1747
    {
1748
      tree decl = REG_EXPR (loc);
1749
      HOST_WIDE_INT offset = REG_OFFSET (loc);
1750
 
1751
      decl = var_debug_decl (decl);
1752
 
1753
      clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1754
    }
1755
 
1756
  for (node = *nextp; node; node = next)
1757
    {
1758
      next = node->next;
1759
      if (clobber || !dv_onepart_p (node->dv))
1760
        {
1761
          delete_variable_part (set, node->loc, node->dv, node->offset);
1762
          pool_free (attrs_pool, node);
1763
          *nextp = next;
1764
        }
1765
      else
1766
        nextp = &node->next;
1767
    }
1768
}
1769
 
1770
/* Delete content of register with number REGNO in dataflow set SET.  */
1771
 
1772
static void
1773
var_regno_delete (dataflow_set *set, int regno)
1774
{
1775
  attrs *reg = &set->regs[regno];
1776
  attrs node, next;
1777
 
1778
  for (node = *reg; node; node = next)
1779
    {
1780
      next = node->next;
1781
      delete_variable_part (set, node->loc, node->dv, node->offset);
1782
      pool_free (attrs_pool, node);
1783
    }
1784
  *reg = NULL;
1785
}
1786
 
1787
/* Set the location of DV, OFFSET as the MEM LOC.  */
1788
 
1789
static void
1790
var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1791
                  decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1792
                  enum insert_option iopt)
1793
{
1794
  if (dv_is_decl_p (dv))
1795
    dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1796
 
1797
  set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1798
}
1799
 
1800
/* Set the location part of variable MEM_EXPR (LOC) in dataflow set
1801
   SET to LOC.
1802
   Adjust the address first if it is stack pointer based.  */
1803
 
1804
static void
1805
var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1806
             rtx set_src)
1807
{
1808
  tree decl = MEM_EXPR (loc);
1809
  HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1810
 
1811
  var_mem_decl_set (set, loc, initialized,
1812
                    dv_from_decl (decl), offset, set_src, INSERT);
1813
}
1814
 
1815
/* Delete and set the location part of variable MEM_EXPR (LOC) in
1816
   dataflow set SET to LOC.  If MODIFY is true, any other live copies
1817
   of the same variable part are also deleted from the dataflow set,
1818
   otherwise the variable part is assumed to be copied from another
1819
   location holding the same part.
1820
   Adjust the address first if it is stack pointer based.  */
1821
 
1822
static void
1823
var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1824
                        enum var_init_status initialized, rtx set_src)
1825
{
1826
  tree decl = MEM_EXPR (loc);
1827
  HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1828
 
1829
  decl = var_debug_decl (decl);
1830
 
1831
  if (initialized == VAR_INIT_STATUS_UNKNOWN)
1832
    initialized = get_init_value (set, loc, dv_from_decl (decl));
1833
 
1834
  if (modify)
1835
    clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
1836
  var_mem_set (set, loc, initialized, set_src);
1837
}
1838
 
1839
/* Delete the location part LOC from dataflow set SET.  If CLOBBER is
1840
   true, also delete any other live copies of the same variable part.
1841
   Adjust the address first if it is stack pointer based.  */
1842
 
1843
static void
1844
var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
1845
{
1846
  tree decl = MEM_EXPR (loc);
1847
  HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
1848
 
1849
  decl = var_debug_decl (decl);
1850
  if (clobber)
1851
    clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
1852
  delete_variable_part (set, loc, dv_from_decl (decl), offset);
1853
}
1854
 
1855
/* Bind a value to a location it was just stored in.  If MODIFIED
1856
   holds, assume the location was modified, detaching it from any
1857
   values bound to it.  */
1858
 
1859
static void
1860
val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified)
1861
{
1862
  cselib_val *v = CSELIB_VAL_PTR (val);
1863
 
1864
  gcc_assert (cselib_preserved_value_p (v));
1865
 
1866
  if (dump_file)
1867
    {
1868
      fprintf (dump_file, "%i: ", INSN_UID (insn));
1869
      print_inline_rtx (dump_file, val, 0);
1870
      fprintf (dump_file, " stored in ");
1871
      print_inline_rtx (dump_file, loc, 0);
1872
      if (v->locs)
1873
        {
1874
          struct elt_loc_list *l;
1875
          for (l = v->locs; l; l = l->next)
1876
            {
1877
              fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
1878
              print_inline_rtx (dump_file, l->loc, 0);
1879
            }
1880
        }
1881
      fprintf (dump_file, "\n");
1882
    }
1883
 
1884
  if (REG_P (loc))
1885
    {
1886
      if (modified)
1887
        var_regno_delete (set, REGNO (loc));
1888
      var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1889
                        dv_from_value (val), 0, NULL_RTX, INSERT);
1890
    }
1891
  else if (MEM_P (loc))
1892
    var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
1893
                      dv_from_value (val), 0, NULL_RTX, INSERT);
1894
  else
1895
    set_variable_part (set, loc, dv_from_value (val), 0,
1896
                       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
1897
}
1898
 
1899
/* Reset the variable for DV in dataflow set SET, detaching all its
1900
   equivalences.  */
1901
 
1902
static void
1903
val_reset (dataflow_set *set, decl_or_value dv)
1904
{
1905
  variable var = shared_hash_find (set->vars, dv);
1906
  location_chain node;
1907
  rtx cval;
1908
 
1909
  if (!var || !var->n_var_parts)
1910
    return;
1911
 
1912
  gcc_assert (var->n_var_parts == 1);
1913
 
1914
  cval = NULL;
1915
  for (node = var->var_part[0].loc_chain; node; node = node->next)
1916
    if (GET_CODE (node->loc) == VALUE
1917
        && canon_value_cmp (node->loc, cval))
1918
      cval = node->loc;
1919
 
1920
  for (node = var->var_part[0].loc_chain; node; node = node->next)
1921
    if (GET_CODE (node->loc) == VALUE && cval != node->loc)
1922
      {
1923
        /* Redirect the equivalence link to the new canonical
1924
           value, or simply remove it if it would point at
1925
           itself.  */
1926
        if (cval)
1927
          set_variable_part (set, cval, dv_from_value (node->loc),
1928
                             0, node->init, node->set_src, NO_INSERT);
1929
        delete_variable_part (set, dv_as_value (dv),
1930
                              dv_from_value (node->loc), 0);
1931
      }
1932
 
1933
  if (cval)
1934
    {
1935
      decl_or_value cdv = dv_from_value (cval);
1936
 
1937
      /* Keep the remaining values connected, accumulating links
1938
         in the canonical value.  */
1939
      for (node = var->var_part[0].loc_chain; node; node = node->next)
1940
        {
1941
          if (node->loc == cval)
1942
            continue;
1943
          else if (GET_CODE (node->loc) == REG)
1944
            var_reg_decl_set (set, node->loc, node->init, cdv, 0,
1945
                              node->set_src, NO_INSERT);
1946
          else if (GET_CODE (node->loc) == MEM)
1947
            var_mem_decl_set (set, node->loc, node->init, cdv, 0,
1948
                              node->set_src, NO_INSERT);
1949
          else
1950
            set_variable_part (set, node->loc, cdv, 0,
1951
                               node->init, node->set_src, NO_INSERT);
1952
        }
1953
    }
1954
 
1955
  /* We remove this last, to make sure that the canonical value is not
1956
     removed to the point of requiring reinsertion.  */
1957
  if (cval)
1958
    delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
1959
 
1960
  clobber_variable_part (set, NULL, dv, 0, NULL);
1961
 
1962
  /* ??? Should we make sure there aren't other available values or
1963
     variables whose values involve this one other than by
1964
     equivalence?  E.g., at the very least we should reset MEMs, those
1965
     shouldn't be too hard to find cselib-looking up the value as an
1966
     address, then locating the resulting value in our own hash
1967
     table.  */
1968
}
1969
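/* Illustrative sketch, not part of var-tracking.c: what val_reset does to the
   equivalence class of DV when DV's own node goes away.  Pick the lowest-uid
   survivor as the new canonical element and point every other member straight
   at it, so the class stays star-shaped.  toy_node and toy_relink are
   hypothetical names.  */
#include <stddef.h>

struct toy_node
{
  unsigned uid;
  struct toy_node *canon;       /* Link to the canonical member, or NULL.  */
};

/* MEMBERS[0..N-1] form one equivalence class.  Pick the lowest-uid member as
   the new canonical one, relink the rest to it, and return it.  */
static struct toy_node *
toy_relink (struct toy_node **members, size_t n)
{
  struct toy_node *cval = NULL;
  size_t i;

  for (i = 0; i < n; i++)
    if (cval == NULL || members[i]->uid < cval->uid)
      cval = members[i];

  for (i = 0; i < n; i++)
    members[i]->canon = (members[i] == cval ? NULL : cval);

  return cval;
}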
 
1970
/* Find the values in a given location and map the val to another
1971
   value, if it is unique, or add the location as one holding the
1972
   value.  */
1973
 
1974
static void
1975
val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn)
1976
{
1977
  decl_or_value dv = dv_from_value (val);
1978
 
1979
  if (dump_file && (dump_flags & TDF_DETAILS))
1980
    {
1981
      if (insn)
1982
        fprintf (dump_file, "%i: ", INSN_UID (insn));
1983
      else
1984
        fprintf (dump_file, "head: ");
1985
      print_inline_rtx (dump_file, val, 0);
1986
      fputs (" is at ", dump_file);
1987
      print_inline_rtx (dump_file, loc, 0);
1988
      fputc ('\n', dump_file);
1989
    }
1990
 
1991
  val_reset (set, dv);
1992
 
1993
  if (REG_P (loc))
1994
    {
1995
      attrs node, found = NULL;
1996
 
1997
      for (node = set->regs[REGNO (loc)]; node; node = node->next)
1998
        if (dv_is_value_p (node->dv)
1999
            && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2000
          {
2001
            found = node;
2002
 
2003
            /* Map incoming equivalences.  ??? Wouldn't it be nice if
2004
             we just started sharing the location lists?  Maybe a
2005
             circular list ending at the value itself or some
2006
             such.  */
2007
            set_variable_part (set, dv_as_value (node->dv),
2008
                               dv_from_value (val), node->offset,
2009
                               VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2010
            set_variable_part (set, val, node->dv, node->offset,
2011
                               VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2012
          }
2013
 
2014
      /* If we didn't find any equivalence, we need to remember that
2015
         this value is held in the named register.  */
2016
      if (!found)
2017
        var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2018
                          dv_from_value (val), 0, NULL_RTX, INSERT);
2019
    }
2020
  else if (MEM_P (loc))
2021
    /* ??? Merge equivalent MEMs.  */
2022
    var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2023
                      dv_from_value (val), 0, NULL_RTX, INSERT);
2024
  else
2025
    /* ??? Merge equivalent expressions.  */
2026
    set_variable_part (set, loc, dv_from_value (val), 0,
2027
                       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2028
}
2029
 
2030
/* Initialize dataflow set SET to be empty.
2031
   The table of variables starts out as a copy of the empty shared hash.  */
2032
 
2033
static void
2034
dataflow_set_init (dataflow_set *set)
2035
{
2036
  init_attrs_list_set (set->regs);
2037
  set->vars = shared_hash_copy (empty_shared_hash);
2038
  set->stack_adjust = 0;
2039
  set->traversed_vars = NULL;
2040
}
2041
 
2042
/* Delete the contents of dataflow set SET.  */
2043
 
2044
static void
2045
dataflow_set_clear (dataflow_set *set)
2046
{
2047
  int i;
2048
 
2049
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2050
    attrs_list_clear (&set->regs[i]);
2051
 
2052
  shared_hash_destroy (set->vars);
2053
  set->vars = shared_hash_copy (empty_shared_hash);
2054
}
2055
 
2056
/* Copy the contents of dataflow set SRC to DST.  */
2057
 
2058
static void
2059
dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2060
{
2061
  int i;
2062
 
2063
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2064
    attrs_list_copy (&dst->regs[i], src->regs[i]);
2065
 
2066
  shared_hash_destroy (dst->vars);
2067
  dst->vars = shared_hash_copy (src->vars);
2068
  dst->stack_adjust = src->stack_adjust;
2069
}
2070
 
2071
/* Information for merging lists of locations for a given offset of a variable.
2072
 */
2073
struct variable_union_info
2074
{
2075
  /* Node of the location chain.  */
2076
  location_chain lc;
2077
 
2078
  /* The sum of positions in the input chains.  */
2079
  int pos;
2080
 
2081
  /* The position in the chain of DST dataflow set.  */
2082
  int pos_dst;
2083
};
2084
 
2085
/* Buffer for location list sorting and its allocated size.  */
2086
static struct variable_union_info *vui_vec;
2087
static int vui_allocated;
2088
 
2089
/* Compare function for qsort, order the structures by POS element.  */
2090
 
2091
static int
2092
variable_union_info_cmp_pos (const void *n1, const void *n2)
2093
{
2094
  const struct variable_union_info *const i1 =
2095
    (const struct variable_union_info *) n1;
2096
  const struct variable_union_info *const i2 =
2097
    (const struct variable_union_info *) n2;
2098
 
2099
  if (i1->pos != i2->pos)
2100
    return i1->pos - i2->pos;
2101
 
2102
  return (i1->pos_dst - i2->pos_dst);
2103
}
2104
 
2105
/* Compute the union of the location parts of variable SRC and of the same
2106
   variable in dataflow set SET.  Compute a "sorted" union of the location chains
2107
   for common offsets, i.e. the locations of a variable part are sorted by
2108
   a priority where the priority is the sum of the positions in the 2 chains
2109
   (if a location is only in one list the position in the second list is
2110
   defined to be larger than the length of the chains).
2111
   When we are updating the location parts the newest location is in the
2112
   beginning of the chain, so when we do the described "sorted" union
2113
   we keep the newest locations in the beginning.  */
2114
 
2115
static int
2116
variable_union (variable src, dataflow_set *set)
2117
{
2118
  variable dst;
2119
  void **dstp;
2120
  int i, j, k;
2121
 
2122
  dstp = shared_hash_find_slot (set->vars, src->dv);
2123
  if (!dstp || !*dstp)
2124
    {
2125
      src->refcount++;
2126
 
2127
      dst_can_be_shared = false;
2128
      if (!dstp)
2129
        dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2130
 
2131
      *dstp = src;
2132
 
2133
      /* Continue traversing the hash table.  */
2134
      return 1;
2135
    }
2136
  else
2137
    dst = (variable) *dstp;
2138
 
2139
  gcc_assert (src->n_var_parts);
2140
 
2141
  /* We can combine one-part variables very efficiently, because their
2142
     entries are in canonical order.  */
2143
  if (dv_onepart_p (src->dv))
2144
    {
2145
      location_chain *nodep, dnode, snode;
2146
 
2147
      gcc_assert (src->n_var_parts == 1
2148
                  && dst->n_var_parts == 1);
2149
 
2150
      snode = src->var_part[0].loc_chain;
2151
      gcc_assert (snode);
2152
 
2153
    restart_onepart_unshared:
2154
      nodep = &dst->var_part[0].loc_chain;
2155
      dnode = *nodep;
2156
      gcc_assert (dnode);
2157
 
2158
      while (snode)
2159
        {
2160
          int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2161
 
2162
          if (r > 0)
2163
            {
2164
              location_chain nnode;
2165
 
2166
              if (shared_var_p (dst, set->vars))
2167
                {
2168
                  dstp = unshare_variable (set, dstp, dst,
2169
                                           VAR_INIT_STATUS_INITIALIZED);
2170
                  dst = (variable)*dstp;
2171
                  goto restart_onepart_unshared;
2172
                }
2173
 
2174
              *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
2175
              nnode->loc = snode->loc;
2176
              nnode->init = snode->init;
2177
              if (!snode->set_src || MEM_P (snode->set_src))
2178
                nnode->set_src = NULL;
2179
              else
2180
                nnode->set_src = snode->set_src;
2181
              nnode->next = dnode;
2182
              dnode = nnode;
2183
            }
2184
#ifdef ENABLE_CHECKING
2185
          else if (r == 0)
2186
            gcc_assert (rtx_equal_p (dnode->loc, snode->loc));
2187
#endif
2188
 
2189
          if (r >= 0)
2190
            snode = snode->next;
2191
 
2192
          nodep = &dnode->next;
2193
          dnode = *nodep;
2194
        }
2195
 
2196
      return 1;
2197
    }
2198
 
2199
  /* Count the number of location parts, result is K.  */
2200
  for (i = 0, j = 0, k = 0;
2201
       i < src->n_var_parts && j < dst->n_var_parts; k++)
2202
    {
2203
      if (src->var_part[i].offset == dst->var_part[j].offset)
2204
        {
2205
          i++;
2206
          j++;
2207
        }
2208
      else if (src->var_part[i].offset < dst->var_part[j].offset)
2209
        i++;
2210
      else
2211
        j++;
2212
    }
2213
  k += src->n_var_parts - i;
2214
  k += dst->n_var_parts - j;
2215
 
2216
  /* We track only variables whose size is <= MAX_VAR_PARTS bytes;
2217
     thus there are at most MAX_VAR_PARTS different offsets.  */
2218
  gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS);
2219
 
2220
  if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2221
    {
2222
      dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2223
      dst = (variable)*dstp;
2224
    }
2225
 
2226
  i = src->n_var_parts - 1;
2227
  j = dst->n_var_parts - 1;
2228
  dst->n_var_parts = k;
2229
 
2230
  for (k--; k >= 0; k--)
2231
    {
2232
      location_chain node, node2;
2233
 
2234
      if (i >= 0 && j >= 0
2235
          && src->var_part[i].offset == dst->var_part[j].offset)
2236
        {
2237
          /* Compute the "sorted" union of the chains, i.e. the locations which
2238
             are in both chains go first, they are sorted by the sum of
2239
             positions in the chains.  */
2240
          int dst_l, src_l;
2241
          int ii, jj, n;
2242
          struct variable_union_info *vui;
2243
 
2244
          /* If DST is shared, compare the location chains.
2245
             If they are different, we will most likely modify the chain
2246
             in DST, so make a copy of DST.  */
2247
          if (shared_var_p (dst, set->vars))
2248
            {
2249
              for (node = src->var_part[i].loc_chain,
2250
                   node2 = dst->var_part[j].loc_chain; node && node2;
2251
                   node = node->next, node2 = node2->next)
2252
                {
2253
                  if (!((REG_P (node2->loc)
2254
                         && REG_P (node->loc)
2255
                         && REGNO (node2->loc) == REGNO (node->loc))
2256
                        || rtx_equal_p (node2->loc, node->loc)))
2257
                    {
2258
                      if (node2->init < node->init)
2259
                        node2->init = node->init;
2260
                      break;
2261
                    }
2262
                }
2263
              if (node || node2)
2264
                {
2265
                  dstp = unshare_variable (set, dstp, dst,
2266
                                           VAR_INIT_STATUS_UNKNOWN);
2267
                  dst = (variable)*dstp;
2268
                }
2269
            }
2270
 
2271
          src_l = 0;
2272
          for (node = src->var_part[i].loc_chain; node; node = node->next)
2273
            src_l++;
2274
          dst_l = 0;
2275
          for (node = dst->var_part[j].loc_chain; node; node = node->next)
2276
            dst_l++;
2277
 
2278
          if (dst_l == 1)
2279
            {
2280
              /* The most common case; much simpler, no qsort is needed.  */
2281
              location_chain dstnode = dst->var_part[j].loc_chain;
2282
              dst->var_part[k].loc_chain = dstnode;
2283
              dst->var_part[k].offset = dst->var_part[j].offset;
2284
              node2 = dstnode;
2285
              for (node = src->var_part[i].loc_chain; node; node = node->next)
2286
                if (!((REG_P (dstnode->loc)
2287
                       && REG_P (node->loc)
2288
                       && REGNO (dstnode->loc) == REGNO (node->loc))
2289
                      || rtx_equal_p (dstnode->loc, node->loc)))
2290
                  {
2291
                    location_chain new_node;
2292
 
2293
                    /* Copy the location from SRC.  */
2294
                    new_node = (location_chain) pool_alloc (loc_chain_pool);
2295
                    new_node->loc = node->loc;
2296
                    new_node->init = node->init;
2297
                    if (!node->set_src || MEM_P (node->set_src))
2298
                      new_node->set_src = NULL;
2299
                    else
2300
                      new_node->set_src = node->set_src;
2301
                    node2->next = new_node;
2302
                    node2 = new_node;
2303
                  }
2304
              node2->next = NULL;
2305
            }
2306
          else
2307
            {
2308
              if (src_l + dst_l > vui_allocated)
2309
                {
2310
                  vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2311
                  vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2312
                                        vui_allocated);
2313
                }
2314
              vui = vui_vec;
2315
 
2316
              /* Fill in the locations from DST.  */
2317
              for (node = dst->var_part[j].loc_chain, jj = 0; node;
2318
                   node = node->next, jj++)
2319
                {
2320
                  vui[jj].lc = node;
2321
                  vui[jj].pos_dst = jj;
2322
 
2323
                  /* Pos plus a value larger than the sum of 2 valid positions.  */
2324
                  vui[jj].pos = jj + src_l + dst_l;
2325
                }
2326
 
2327
              /* Fill in the locations from SRC.  */
2328
              n = dst_l;
2329
              for (node = src->var_part[i].loc_chain, ii = 0; node;
2330
                   node = node->next, ii++)
2331
                {
2332
                  /* Find location from NODE.  */
2333
                  for (jj = 0; jj < dst_l; jj++)
2334
                    {
2335
                      if ((REG_P (vui[jj].lc->loc)
2336
                           && REG_P (node->loc)
2337
                           && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
2338
                          || rtx_equal_p (vui[jj].lc->loc, node->loc))
2339
                        {
2340
                          vui[jj].pos = jj + ii;
2341
                          break;
2342
                        }
2343
                    }
2344
                  if (jj >= dst_l)      /* The location has not been found.  */
2345
                    {
2346
                      location_chain new_node;
2347
 
2348
                      /* Copy the location from SRC.  */
2349
                      new_node = (location_chain) pool_alloc (loc_chain_pool);
2350
                      new_node->loc = node->loc;
2351
                      new_node->init = node->init;
2352
                      if (!node->set_src || MEM_P (node->set_src))
2353
                        new_node->set_src = NULL;
2354
                      else
2355
                        new_node->set_src = node->set_src;
2356
                      vui[n].lc = new_node;
2357
                      vui[n].pos_dst = src_l + dst_l;
2358
                      vui[n].pos = ii + src_l + dst_l;
2359
                      n++;
2360
                    }
2361
                }
2362
 
2363
              if (dst_l == 2)
2364
                {
2365
                  /* Special-case a still very common case.  For dst_l == 2
2366
                     all entries dst_l ... n-1 are sorted, with for i >= dst_l
2367
                     vui[i].pos == i + src_l + dst_l.  */
2368
                  if (vui[0].pos > vui[1].pos)
2369
                    {
2370
                      /* Order should be 1, 0, 2... */
2371
                      dst->var_part[k].loc_chain = vui[1].lc;
2372
                      vui[1].lc->next = vui[0].lc;
2373
                      if (n >= 3)
2374
                        {
2375
                          vui[0].lc->next = vui[2].lc;
2376
                          vui[n - 1].lc->next = NULL;
2377
                        }
2378
                      else
2379
                        vui[0].lc->next = NULL;
2380
                      ii = 3;
2381
                    }
2382
                  else
2383
                    {
2384
                      dst->var_part[k].loc_chain = vui[0].lc;
2385
                      if (n >= 3 && vui[2].pos < vui[1].pos)
2386
                        {
2387
                          /* Order should be 0, 2, 1, 3... */
2388
                          vui[0].lc->next = vui[2].lc;
2389
                          vui[2].lc->next = vui[1].lc;
2390
                          if (n >= 4)
2391
                            {
2392
                              vui[1].lc->next = vui[3].lc;
2393
                              vui[n - 1].lc->next = NULL;
2394
                            }
2395
                          else
2396
                            vui[1].lc->next = NULL;
2397
                          ii = 4;
2398
                        }
2399
                      else
2400
                        {
2401
                          /* Order should be 0, 1, 2... */
2402
                          ii = 1;
2403
                          vui[n - 1].lc->next = NULL;
2404
                        }
2405
                    }
2406
                  for (; ii < n; ii++)
2407
                    vui[ii - 1].lc->next = vui[ii].lc;
2408
                }
2409
              else
2410
                {
2411
                  qsort (vui, n, sizeof (struct variable_union_info),
2412
                         variable_union_info_cmp_pos);
2413
 
2414
                  /* Reconnect the nodes in sorted order.  */
2415
                  for (ii = 1; ii < n; ii++)
2416
                    vui[ii - 1].lc->next = vui[ii].lc;
2417
                  vui[n - 1].lc->next = NULL;
2418
                  dst->var_part[k].loc_chain = vui[0].lc;
2419
                }
2420
 
2421
              dst->var_part[k].offset = dst->var_part[j].offset;
2422
            }
2423
          i--;
2424
          j--;
2425
        }
2426
      else if ((i >= 0 && j >= 0
2427
                && src->var_part[i].offset < dst->var_part[j].offset)
2428
               || i < 0)
2429
        {
2430
          dst->var_part[k] = dst->var_part[j];
2431
          j--;
2432
        }
2433
      else if ((i >= 0 && j >= 0
2434
                && src->var_part[i].offset > dst->var_part[j].offset)
2435
               || j < 0)
2436
        {
2437
          location_chain *nextp;
2438
 
2439
          /* Copy the chain from SRC.  */
2440
          nextp = &dst->var_part[k].loc_chain;
2441
          for (node = src->var_part[i].loc_chain; node; node = node->next)
2442
            {
2443
              location_chain new_lc;
2444
 
2445
              new_lc = (location_chain) pool_alloc (loc_chain_pool);
2446
              new_lc->next = NULL;
2447
              new_lc->init = node->init;
2448
              if (!node->set_src || MEM_P (node->set_src))
2449
                new_lc->set_src = NULL;
2450
              else
2451
                new_lc->set_src = node->set_src;
2452
              new_lc->loc = node->loc;
2453
 
2454
              *nextp = new_lc;
2455
              nextp = &new_lc->next;
2456
            }
2457
 
2458
          dst->var_part[k].offset = src->var_part[i].offset;
2459
          i--;
2460
        }
2461
      dst->var_part[k].cur_loc = NULL;
2462
    }
2463
 
2464
  if (flag_var_tracking_uninit)
2465
    for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
2466
      {
2467
        location_chain node, node2;
2468
        for (node = src->var_part[i].loc_chain; node; node = node->next)
2469
          for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
2470
            if (rtx_equal_p (node->loc, node2->loc))
2471
              {
2472
                if (node->init > node2->init)
2473
                  node2->init = node->init;
2474
              }
2475
      }
2476
 
2477
  /* Continue traversing the hash table.  */
2478
  return 1;
2479
}
2480
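/* Illustrative sketch, not part of var-tracking.c: a reduced model of the
   "sorted" union described before variable_union, over arrays of toy
   locations identified by an int.  Locations present in both inputs come
   first, ordered by the sum of their positions; a location found in only one
   input is given a position beyond any valid sum.  All names below are
   hypothetical; allocation checking is omitted.  */
#include <stdlib.h>

struct toy_union_info
{
  int loc;        /* The location itself.  */
  int pos;        /* Sum of positions in the two chains.  */
  int pos_dst;    /* Position in the DST chain (tie breaker).  */
};

static int
toy_union_cmp (const void *a, const void *b)
{
  const struct toy_union_info *x = (const struct toy_union_info *) a;
  const struct toy_union_info *y = (const struct toy_union_info *) b;

  if (x->pos != y->pos)
    return x->pos - y->pos;
  return x->pos_dst - y->pos_dst;
}

/* Union DST[0..DST_L-1] and SRC[0..SRC_L-1] into OUT, which must have room
   for DST_L + SRC_L entries; return the number of entries written.  */
static int
toy_sorted_union (const int *dst, int dst_l, const int *src, int src_l,
                  int *out)
{
  struct toy_union_info *vui;
  int i, j, n = 0;

  vui = (struct toy_union_info *)
    malloc ((dst_l + src_l) * sizeof (struct toy_union_info));

  /* Seed with DST; the default position means "only in DST".  */
  for (j = 0; j < dst_l; j++)
    {
      vui[n].loc = dst[j];
      vui[n].pos_dst = j;
      vui[n].pos = j + src_l + dst_l;
      n++;
    }

  /* Merge SRC in, lowering the priority of common locations to the sum of
     their positions in the two inputs.  */
  for (i = 0; i < src_l; i++)
    {
      for (j = 0; j < dst_l; j++)
        if (vui[j].loc == src[i])
          {
            vui[j].pos = j + i;
            break;
          }
      if (j >= dst_l)
        {
          vui[n].loc = src[i];
          vui[n].pos_dst = src_l + dst_l;
          vui[n].pos = i + src_l + dst_l;
          n++;
        }
    }

  qsort (vui, n, sizeof (struct toy_union_info), toy_union_cmp);
  for (i = 0; i < n; i++)
    out[i] = vui[i].loc;
  free (vui);
  return n;
}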
 
2481
/* Compute union of dataflow sets SRC and DST and store it to DST.  */
2482
 
2483
static void
2484
dataflow_set_union (dataflow_set *dst, dataflow_set *src)
2485
{
2486
  int i;
2487
 
2488
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2489
    attrs_list_union (&dst->regs[i], src->regs[i]);
2490
 
2491
  if (dst->vars == empty_shared_hash)
2492
    {
2493
      shared_hash_destroy (dst->vars);
2494
      dst->vars = shared_hash_copy (src->vars);
2495
    }
2496
  else
2497
    {
2498
      htab_iterator hi;
2499
      variable var;
2500
 
2501
      FOR_EACH_HTAB_ELEMENT (shared_hash_htab (src->vars), var, variable, hi)
2502
        variable_union (var, dst);
2503
    }
2504
}
2505
 
2506
/* Whether the value is currently being expanded.  */
2507
#define VALUE_RECURSED_INTO(x) \
2508
  (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
2509
/* Whether the value is in changed_variables hash table.  */
2510
#define VALUE_CHANGED(x) \
2511
  (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
2512
/* Whether the decl is in changed_variables hash table.  */
2513
#define DECL_CHANGED(x) TREE_VISITED (x)
2514
 
2515
/* Record that DV has been added to (NEWV true) or removed from (NEWV false)
2516
   the changed_variables hash table.  */
2517
 
2518
static inline void
2519
set_dv_changed (decl_or_value dv, bool newv)
2520
{
2521
  if (dv_is_value_p (dv))
2522
    VALUE_CHANGED (dv_as_value (dv)) = newv;
2523
  else
2524
    DECL_CHANGED (dv_as_decl (dv)) = newv;
2525
}
2526
 
2527
/* Return true if DV is present in changed_variables hash table.  */
2528
 
2529
static inline bool
2530
dv_changed_p (decl_or_value dv)
2531
{
2532
  return (dv_is_value_p (dv)
2533
          ? VALUE_CHANGED (dv_as_value (dv))
2534
          : DECL_CHANGED (dv_as_decl (dv)));
2535
}
2536
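/* Illustrative sketch, not part of var-tracking.c: the VALUE_CHANGED /
   DECL_CHANGED flags above merely record membership in changed_variables so
   an entry is never queued twice.  The same idea, reduced to a toy list with
   a per-item flag; toy_item and the helpers are hypothetical names.  */
#include <stddef.h>

struct toy_item
{
  int changed;               /* Nonzero while the item sits in the list.  */
  struct toy_item *next;
};

/* Push ITEM onto *LIST unless it is already there.  */
static void
toy_note_changed (struct toy_item **list, struct toy_item *item)
{
  if (item->changed)
    return;
  item->changed = 1;
  item->next = *list;
  *list = item;
}

/* Pop one item, clearing its flag so it can be queued again later.  */
static struct toy_item *
toy_pop_changed (struct toy_item **list)
{
  struct toy_item *item = *list;

  if (item)
    {
      *list = item->next;
      item->changed = 0;
      item->next = NULL;
    }
  return item;
}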
 
2537
/* Return a location list node whose loc is rtx_equal to LOC, in the
2538
   location list of a one-part variable or value VAR, or in that of
2539
   any values recursively mentioned in the location lists.  VARS must
2540
   be in star-canonical form.  */
2541
 
2542
static location_chain
2543
find_loc_in_1pdv (rtx loc, variable var, htab_t vars)
2544
{
2545
  location_chain node;
2546
  enum rtx_code loc_code;
2547
 
2548
  if (!var)
2549
    return NULL;
2550
 
2551
#ifdef ENABLE_CHECKING
2552
  gcc_assert (dv_onepart_p (var->dv));
2553
#endif
2554
 
2555
  if (!var->n_var_parts)
2556
    return NULL;
2557
 
2558
#ifdef ENABLE_CHECKING
2559
  gcc_assert (var->var_part[0].offset == 0);
2560
  gcc_assert (loc != dv_as_opaque (var->dv));
2561
#endif
2562
 
2563
  loc_code = GET_CODE (loc);
2564
  for (node = var->var_part[0].loc_chain; node; node = node->next)
2565
    {
2566
      decl_or_value dv;
2567
      variable rvar;
2568
 
2569
      if (GET_CODE (node->loc) != loc_code)
2570
        {
2571
          if (GET_CODE (node->loc) != VALUE)
2572
            continue;
2573
        }
2574
      else if (loc == node->loc)
2575
        return node;
2576
      else if (loc_code != VALUE)
2577
        {
2578
          if (rtx_equal_p (loc, node->loc))
2579
            return node;
2580
          continue;
2581
        }
2582
 
2583
      /* Since we're in star-canonical form, we don't need to visit
2584
         non-canonical nodes: one-part variables and non-canonical
2585
         values would only point back to the canonical node.  */
2586
      if (dv_is_value_p (var->dv)
2587
          && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
2588
        {
2589
          /* Skip all subsequent VALUEs.  */
2590
          while (node->next && GET_CODE (node->next->loc) == VALUE)
2591
            {
2592
              node = node->next;
2593
#ifdef ENABLE_CHECKING
2594
              gcc_assert (!canon_value_cmp (node->loc,
2595
                                            dv_as_value (var->dv)));
2596
#endif
2597
              if (loc == node->loc)
2598
                return node;
2599
            }
2600
          continue;
2601
        }
2602
 
2603
#ifdef ENABLE_CHECKING
2604
      gcc_assert (node == var->var_part[0].loc_chain);
2605
      gcc_assert (!node->next);
2606
#endif
2607
 
2608
      dv = dv_from_value (node->loc);
2609
      rvar = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
2610
      return find_loc_in_1pdv (loc, rvar, vars);
2611
    }
2612
 
2613
  return NULL;
2614
}
2615
 
2616
/* Hash table iteration argument passed to variable_merge.  */
2617
struct dfset_merge
2618
{
2619
  /* The set in which the merge is to be inserted.  */
2620
  dataflow_set *dst;
2621
  /* The set that we're iterating in.  */
2622
  dataflow_set *cur;
2623
  /* The set that may contain the other dv we are to merge with.  */
2624
  dataflow_set *src;
2625
  /* Number of onepart dvs in src.  */
2626
  int src_onepart_cnt;
2627
};
2628
 
2629
/* Insert LOC in *NODEP, if it's not there yet.  The list must be in
2630
   loc_cmp order, and it is maintained as such.  */
2631
 
2632
static void
2633
insert_into_intersection (location_chain *nodep, rtx loc,
2634
                          enum var_init_status status)
2635
{
2636
  location_chain node;
2637
  int r;
2638
 
2639
  for (node = *nodep; node; nodep = &node->next, node = *nodep)
2640
    if ((r = loc_cmp (node->loc, loc)) == 0)
2641
      {
2642
        node->init = MIN (node->init, status);
2643
        return;
2644
      }
2645
    else if (r > 0)
2646
      break;
2647
 
2648
  node = (location_chain) pool_alloc (loc_chain_pool);
2649
 
2650
  node->loc = loc;
2651
  node->set_src = NULL;
2652
  node->init = status;
2653
  node->next = *nodep;
2654
  *nodep = node;
2655
}
2656
 
2657
/* Insert in DEST the intersection of the locations present in both
2658
   S1NODE and S2VAR, directly or indirectly.  S1NODE is from a
2659
   variable in DSM->cur, whereas S2VAR is from DSM->src.  dvar is in
2660
   DSM->dst.  */
2661
 
2662
static void
2663
intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
2664
                      location_chain s1node, variable s2var)
2665
{
2666
  dataflow_set *s1set = dsm->cur;
2667
  dataflow_set *s2set = dsm->src;
2668
  location_chain found;
2669
 
2670
  if (s2var)
2671
    {
2672
      location_chain s2node;
2673
 
2674
#ifdef ENABLE_CHECKING
2675
      gcc_assert (dv_onepart_p (s2var->dv));
2676
#endif
2677
 
2678
      if (s2var->n_var_parts)
2679
        {
2680
#ifdef ENABLE_CHECKING
2681
          gcc_assert (s2var->var_part[0].offset == 0);
2682
#endif
2683
          s2node = s2var->var_part[0].loc_chain;
2684
 
2685
          for (; s1node && s2node;
2686
               s1node = s1node->next, s2node = s2node->next)
2687
            if (s1node->loc != s2node->loc)
2688
              break;
2689
            else if (s1node->loc == val)
2690
              continue;
2691
            else
2692
              insert_into_intersection (dest, s1node->loc,
2693
                                        MIN (s1node->init, s2node->init));
2694
        }
2695
    }
2696
 
2697
  for (; s1node; s1node = s1node->next)
2698
    {
2699
      if (s1node->loc == val)
2700
        continue;
2701
 
2702
      if ((found = find_loc_in_1pdv (s1node->loc, s2var,
2703
                                     shared_hash_htab (s2set->vars))))
2704
        {
2705
          insert_into_intersection (dest, s1node->loc,
2706
                                    MIN (s1node->init, found->init));
2707
          continue;
2708
        }
2709
 
2710
      if (GET_CODE (s1node->loc) == VALUE
2711
          && !VALUE_RECURSED_INTO (s1node->loc))
2712
        {
2713
          decl_or_value dv = dv_from_value (s1node->loc);
2714
          variable svar = shared_hash_find (s1set->vars, dv);
2715
          if (svar)
2716
            {
2717
              if (svar->n_var_parts == 1)
2718
                {
2719
                  VALUE_RECURSED_INTO (s1node->loc) = true;
2720
                  intersect_loc_chains (val, dest, dsm,
2721
                                        svar->var_part[0].loc_chain,
2722
                                        s2var);
2723
                  VALUE_RECURSED_INTO (s1node->loc) = false;
2724
                }
2725
            }
2726
        }
2727
 
2728
      /* ??? if the location is equivalent to any location in src,
2729
         searched recursively
2730
 
2731
           add to dst the values needed to represent the equivalence
2732
 
2733
     telling whether location S is equivalent to another dv's
2734
     location list:
2735
 
2736
       for each location D in the list
2737
 
2738
         if S and D satisfy rtx_equal_p, then it is present
2739
 
2740
         else if D is a value, recurse without cycles
2741
 
2742
         else if S and D have the same CODE and MODE
2743
 
2744
           for each operand oS and the corresponding oD
2745
 
2746
              if oS and oD are not equivalent, then S and D are not equivalent
2747
 
2748
             else if they are RTX vectors
2749
 
2750
               if any vector oS element is not equivalent to its respective oD,
2751
               then S and D are not equivalent
2752
 
2753
   */
2754
 
2755
 
2756
    }
2757
}
2758
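/* Illustrative sketch, not part of var-tracking.c: a simplified model of the
   lockstep walk above, namely intersecting two location lists that are sorted
   by the same comparator and keeping each common location with the weaker
   (smaller) initialization status.  The toy_loc type and names are
   hypothetical.  */
struct toy_loc
{
  int loc;    /* Toy location; the lists are sorted by this field.  */
  int init;   /* Initialization status, larger means better known.  */
};

/* Store in OUT the locations present in both A[0..AN-1] and B[0..BN-1];
   return how many entries were written.  */
static int
toy_intersect (const struct toy_loc *a, int an,
               const struct toy_loc *b, int bn,
               struct toy_loc *out)
{
  int i = 0, j = 0, n = 0;

  while (i < an && j < bn)
    {
      if (a[i].loc < b[j].loc)
        i++;
      else if (a[i].loc > b[j].loc)
        j++;
      else
        {
          out[n].loc = a[i].loc;
          out[n].init = a[i].init < b[j].init ? a[i].init : b[j].init;
          n++, i++, j++;
        }
    }
  return n;
}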
 
2759
/* Return -1 if X should be before Y in a location list for a 1-part
2760
   variable, 1 if Y should be before X, and 0 if they're equivalent
2761
   and should not appear in the list.  */
2762
 
2763
static int
2764
loc_cmp (rtx x, rtx y)
2765
{
2766
  int i, j, r;
2767
  RTX_CODE code = GET_CODE (x);
2768
  const char *fmt;
2769
 
2770
  if (x == y)
2771
    return 0;
2772
 
2773
  if (REG_P (x))
2774
    {
2775
      if (!REG_P (y))
2776
        return -1;
2777
      gcc_assert (GET_MODE (x) == GET_MODE (y));
2778
      if (REGNO (x) == REGNO (y))
2779
        return 0;
2780
      else if (REGNO (x) < REGNO (y))
2781
        return -1;
2782
      else
2783
        return 1;
2784
    }
2785
 
2786
  if (REG_P (y))
2787
    return 1;
2788
 
2789
  if (MEM_P (x))
2790
    {
2791
      if (!MEM_P (y))
2792
        return -1;
2793
      gcc_assert (GET_MODE (x) == GET_MODE (y));
2794
      return loc_cmp (XEXP (x, 0), XEXP (y, 0));
2795
    }
2796
 
2797
  if (MEM_P (y))
2798
    return 1;
2799
 
2800
  if (GET_CODE (x) == VALUE)
2801
    {
2802
      if (GET_CODE (y) != VALUE)
2803
        return -1;
2804
      /* Don't assert the modes are the same; that is true only
2805
         when not recursing.  (subreg:QI (value:SI 1:1) 0)
2806
         and (subreg:QI (value:DI 2:2) 0) can be compared,
2807
         even when the modes are different.  */
2808
      if (canon_value_cmp (x, y))
2809
        return -1;
2810
      else
2811
        return 1;
2812
    }
2813
 
2814
  if (GET_CODE (y) == VALUE)
2815
    return 1;
2816
 
2817
  if (GET_CODE (x) == GET_CODE (y))
2818
    /* Compare operands below.  */;
2819
  else if (GET_CODE (x) < GET_CODE (y))
2820
    return -1;
2821
  else
2822
    return 1;
2823
 
2824
  gcc_assert (GET_MODE (x) == GET_MODE (y));
2825
 
2826
  if (GET_CODE (x) == DEBUG_EXPR)
2827
    {
2828
      if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2829
          < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
2830
        return -1;
2831
#ifdef ENABLE_CHECKING
2832
      gcc_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
2833
                  > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
2834
#endif
2835
      return 1;
2836
    }
2837
 
2838
  fmt = GET_RTX_FORMAT (code);
2839
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
2840
    switch (fmt[i])
2841
      {
2842
      case 'w':
2843
        if (XWINT (x, i) == XWINT (y, i))
2844
          break;
2845
        else if (XWINT (x, i) < XWINT (y, i))
2846
          return -1;
2847
        else
2848
          return 1;
2849
 
2850
      case 'n':
2851
      case 'i':
2852
        if (XINT (x, i) == XINT (y, i))
2853
          break;
2854
        else if (XINT (x, i) < XINT (y, i))
2855
          return -1;
2856
        else
2857
          return 1;
2858
 
2859
      case 'V':
2860
      case 'E':
2861
        /* Compare the vector length first.  */
2862
        if (XVECLEN (x, i) == XVECLEN (y, i))
2863
          /* Compare the vector elements.  */;
2864
        else if (XVECLEN (x, i) < XVECLEN (y, i))
2865
          return -1;
2866
        else
2867
          return 1;
2868
 
2869
        for (j = 0; j < XVECLEN (x, i); j++)
2870
          if ((r = loc_cmp (XVECEXP (x, i, j),
2871
                            XVECEXP (y, i, j))))
2872
            return r;
2873
        break;
2874
 
2875
      case 'e':
2876
        if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
2877
          return r;
2878
        break;
2879
 
2880
      case 'S':
2881
      case 's':
2882
        if (XSTR (x, i) == XSTR (y, i))
2883
          break;
2884
        if (!XSTR (x, i))
2885
          return -1;
2886
        if (!XSTR (y, i))
2887
          return 1;
2888
        if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
2889
          break;
2890
        else if (r < 0)
2891
          return -1;
2892
        else
2893
          return 1;
2894
 
2895
      case 'u':
2896
        /* These are just backpointers, so they don't matter.  */
2897
        break;
2898
 
2899
      case '0':
2900
      case 't':
2901
        break;
2902
 
2903
        /* It is believed that rtx's at this level will never
2904
           contain anything but integers and other rtx's,
2905
           except for within LABEL_REFs and SYMBOL_REFs.  */
2906
      default:
2907
        gcc_unreachable ();
2908
      }
2909
 
2910
  return 0;
2911
}
2912
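/* Illustrative sketch, not part of var-tracking.c: the overall shape of
   loc_cmp as a qsort-style comparator over toy locations.  Order by class
   first (registers, then MEMs, then VALUEs, then everything else), then by a
   key within the class, yielding the total order that keeps one-part
   location chains sorted.  toy_rtx and the helpers are hypothetical.  */
#include <stdlib.h>

enum toy_rtx_kind { TOY_REG, TOY_MEM, TOY_VALUE, TOY_OTHER };

struct toy_rtx
{
  enum toy_rtx_kind kind;
  int key;                  /* Register number, address, uid or code.  */
};

static int
toy_loc_cmp (const void *xp, const void *yp)
{
  const struct toy_rtx *x = (const struct toy_rtx *) xp;
  const struct toy_rtx *y = (const struct toy_rtx *) yp;

  if (x->kind != y->kind)
    return x->kind < y->kind ? -1 : 1;
  if (x->key != y->key)
    return x->key < y->key ? -1 : 1;
  return 0;
}

/* Keep a toy location list sorted the same way the real chains are kept in
   loc_cmp order.  */
static void
toy_sort_locs (struct toy_rtx *locs, size_t n)
{
  qsort (locs, n, sizeof (struct toy_rtx), toy_loc_cmp);
}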
 
2913
/* If decl or value DVP refers to VALUE from *LOC, add backlinks
2914
   from VALUE to DVP.  */
2915
 
2916
static int
2917
add_value_chain (rtx *loc, void *dvp)
2918
{
2919
  decl_or_value dv, ldv;
2920
  value_chain vc, nvc;
2921
  void **slot;
2922
 
2923
  if (GET_CODE (*loc) == VALUE)
2924
    ldv = dv_from_value (*loc);
2925
  else if (GET_CODE (*loc) == DEBUG_EXPR)
2926
    ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
2927
  else
2928
    return 0;
2929
 
2930
  if (dv_as_opaque (ldv) == dvp)
2931
    return 0;
2932
 
2933
  dv = (decl_or_value) dvp;
2934
  slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
2935
                                   INSERT);
2936
  if (!*slot)
2937
    {
2938
      vc = (value_chain) pool_alloc (value_chain_pool);
2939
      vc->dv = ldv;
2940
      vc->next = NULL;
2941
      vc->refcount = 0;
2942
      *slot = (void *) vc;
2943
    }
2944
  else
2945
    {
2946
      for (vc = ((value_chain) *slot)->next; vc; vc = vc->next)
2947
        if (dv_as_opaque (vc->dv) == dv_as_opaque (dv))
2948
          break;
2949
      if (vc)
2950
        {
2951
          vc->refcount++;
2952
          return 0;
2953
        }
2954
    }
2955
  vc = (value_chain) *slot;
2956
  nvc = (value_chain) pool_alloc (value_chain_pool);
2957
  nvc->dv = dv;
2958
  nvc->next = vc->next;
2959
  nvc->refcount = 1;
2960
  vc->next = nvc;
2961
  return 0;
2962
}
2963
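/* Illustrative sketch, not part of var-tracking.c: the refcounted back-link
   list maintained by add_value_chain and remove_value_chain, reduced to a
   toy singly linked list keyed by an int.  All names are hypothetical and
   allocation checking is omitted.  */
#include <stdlib.h>

struct toy_backlink
{
  int referrer;                /* Who refers to the value.  */
  int refcount;                /* How many links from that referrer.  */
  struct toy_backlink *next;
};

/* Record one more reference from REFERRER in *CHAIN.  */
static void
toy_add_backlink (struct toy_backlink **chain, int referrer)
{
  struct toy_backlink *vc;

  for (vc = *chain; vc; vc = vc->next)
    if (vc->referrer == referrer)
      {
        vc->refcount++;
        return;
      }

  vc = (struct toy_backlink *) malloc (sizeof (struct toy_backlink));
  vc->referrer = referrer;
  vc->refcount = 1;
  vc->next = *chain;
  *chain = vc;
}

/* Drop one reference from REFERRER, freeing the node when it hits zero.  */
static void
toy_remove_backlink (struct toy_backlink **chain, int referrer)
{
  struct toy_backlink **slot, *vc;

  for (slot = chain; (vc = *slot); slot = &vc->next)
    if (vc->referrer == referrer)
      {
        if (--vc->refcount == 0)
          {
            *slot = vc->next;
            free (vc);
          }
        return;
      }
}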
 
2964
/* If decl or value DVP refers to VALUEs from within LOC, add backlinks
2965
   from those VALUEs to DVP.  */
2966
 
2967
static void
2968
add_value_chains (decl_or_value dv, rtx loc)
2969
{
2970
  if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
2971
    {
2972
      add_value_chain (&loc, dv_as_opaque (dv));
2973
      return;
2974
    }
2975
  if (REG_P (loc))
2976
    return;
2977
  if (MEM_P (loc))
2978
    loc = XEXP (loc, 0);
2979
  for_each_rtx (&loc, add_value_chain, dv_as_opaque (dv));
2980
}
2981
 
2982
/* If CSELIB_VAL_PTR of value DV refers to VALUEs, add backlinks from those
2983
   VALUEs to DV.  At the same time get rid of ASM_OPERANDS from the locs list,
2984
   as that is something we can never express in .debug_info and it can prevent
2985
   reverse ops from being used.  */
2986
 
2987
static void
2988
add_cselib_value_chains (decl_or_value dv)
2989
{
2990
  struct elt_loc_list **l;
2991
 
2992
  for (l = &CSELIB_VAL_PTR (dv_as_value (dv))->locs; *l;)
2993
    if (GET_CODE ((*l)->loc) == ASM_OPERANDS)
2994
      *l = (*l)->next;
2995
    else
2996
      {
2997
        for_each_rtx (&(*l)->loc, add_value_chain, dv_as_opaque (dv));
2998
        l = &(*l)->next;
2999
      }
3000
}
3001
 
3002
/* If decl or value DVP refers to VALUE from *LOC, remove backlinks
3003
   from VALUE to DVP.  */
3004
 
3005
static int
3006
remove_value_chain (rtx *loc, void *dvp)
3007
{
3008
  decl_or_value dv, ldv;
3009
  value_chain vc;
3010
  void **slot;
3011
 
3012
  if (GET_CODE (*loc) == VALUE)
3013
    ldv = dv_from_value (*loc);
3014
  else if (GET_CODE (*loc) == DEBUG_EXPR)
3015
    ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc));
3016
  else
3017
    return 0;
3018
 
3019
  if (dv_as_opaque (ldv) == dvp)
3020
    return 0;
3021
 
3022
  dv = (decl_or_value) dvp;
3023
  slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv),
3024
                                   NO_INSERT);
3025
  for (vc = (value_chain) *slot; vc->next; vc = vc->next)
3026
    if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv))
3027
      {
3028
        value_chain dvc = vc->next;
3029
        gcc_assert (dvc->refcount > 0);
3030
        if (--dvc->refcount == 0)
3031
          {
3032
            vc->next = dvc->next;
3033
            pool_free (value_chain_pool, dvc);
3034
            if (vc->next == NULL && vc == (value_chain) *slot)
3035
              {
3036
                pool_free (value_chain_pool, vc);
3037
                htab_clear_slot (value_chains, slot);
3038
              }
3039
          }
3040
        return 0;
3041
      }
3042
  gcc_unreachable ();
3043
}
3044
 
3045
/* If decl or value DVP refers to VALUEs from within LOC, remove backlinks
3046
   from those VALUEs to DVP.  */
3047
 
3048
static void
3049
remove_value_chains (decl_or_value dv, rtx loc)
3050
{
3051
  if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR)
3052
    {
3053
      remove_value_chain (&loc, dv_as_opaque (dv));
3054
      return;
3055
    }
3056
  if (REG_P (loc))
3057
    return;
3058
  if (MEM_P (loc))
3059
    loc = XEXP (loc, 0);
3060
  for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv));
3061
}
3062
 
3063
#if ENABLE_CHECKING
3064
/* If CSELIB_VAL_PTR of value DV refers to VALUEs, remove backlinks from those
3065
   VALUEs to DV.  */
3066
 
3067
static void
3068
remove_cselib_value_chains (decl_or_value dv)
3069
{
3070
  struct elt_loc_list *l;
3071
 
3072
  for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next)
3073
    for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv));
3074
}
3075
 
3076
/* Check the order of entries in one-part variables.   */
3077
 
3078
static int
3079
canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED)
3080
{
3081
  variable var = (variable) *slot;
3082
  decl_or_value dv = var->dv;
3083
  location_chain node, next;
3084
 
3085
#ifdef ENABLE_RTL_CHECKING
3086
  int i;
3087
  for (i = 0; i < var->n_var_parts; i++)
3088
    gcc_assert (var->var_part[0].cur_loc == NULL);
3089
  gcc_assert (!var->cur_loc_changed && !var->in_changed_variables);
3090
#endif
3091
 
3092
  if (!dv_onepart_p (dv))
3093
    return 1;
3094
 
3095
  gcc_assert (var->n_var_parts == 1);
3096
  node = var->var_part[0].loc_chain;
3097
  gcc_assert (node);
3098
 
3099
  while ((next = node->next))
3100
    {
3101
      gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3102
      node = next;
3103
    }
3104
 
3105
  return 1;
3106
}
3107
#endif
3108
 
3109
/* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3110
   more likely to be chosen as canonical for an equivalence set.
3111
   Ensure less likely values can reach more likely neighbors, making
3112
   the connections bidirectional.  */
3113
 
3114
static int
3115
canonicalize_values_mark (void **slot, void *data)
3116
{
3117
  dataflow_set *set = (dataflow_set *)data;
3118
  variable var = (variable) *slot;
3119
  decl_or_value dv = var->dv;
3120
  rtx val;
3121
  location_chain node;
3122
 
3123
  if (!dv_is_value_p (dv))
3124
    return 1;
3125
 
3126
  gcc_assert (var->n_var_parts == 1);
3127
 
3128
  val = dv_as_value (dv);
3129
 
3130
  for (node = var->var_part[0].loc_chain; node; node = node->next)
3131
    if (GET_CODE (node->loc) == VALUE)
3132
      {
3133
        if (canon_value_cmp (node->loc, val))
3134
          VALUE_RECURSED_INTO (val) = true;
3135
        else
3136
          {
3137
            decl_or_value odv = dv_from_value (node->loc);
3138
            void **oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3139
 
3140
            oslot = set_slot_part (set, val, oslot, odv, 0,
3141
                                   node->init, NULL_RTX);
3142
 
3143
            VALUE_RECURSED_INTO (node->loc) = true;
3144
          }
3145
      }
3146
 
3147
  return 1;
3148
}
3149
 
3150
/* Remove redundant entries from equivalence lists in onepart
3151
   variables, canonicalizing equivalence sets into star shapes.  */
3152
 
3153
static int
3154
canonicalize_values_star (void **slot, void *data)
3155
{
3156
  dataflow_set *set = (dataflow_set *)data;
3157
  variable var = (variable) *slot;
3158
  decl_or_value dv = var->dv;
3159
  location_chain node;
3160
  decl_or_value cdv;
3161
  rtx val, cval;
3162
  void **cslot;
3163
  bool has_value;
3164
  bool has_marks;
3165
 
3166
  if (!dv_onepart_p (dv))
3167
    return 1;
3168
 
3169
  gcc_assert (var->n_var_parts == 1);
3170
 
3171
  if (dv_is_value_p (dv))
3172
    {
3173
      cval = dv_as_value (dv);
3174
      if (!VALUE_RECURSED_INTO (cval))
3175
        return 1;
3176
      VALUE_RECURSED_INTO (cval) = false;
3177
    }
3178
  else
3179
    cval = NULL_RTX;
3180
 
3181
 restart:
3182
  val = cval;
3183
  has_value = false;
3184
  has_marks = false;
3185
 
3186
  gcc_assert (var->n_var_parts == 1);
3187
 
3188
  for (node = var->var_part[0].loc_chain; node; node = node->next)
3189
    if (GET_CODE (node->loc) == VALUE)
3190
      {
3191
        has_value = true;
3192
        if (VALUE_RECURSED_INTO (node->loc))
3193
          has_marks = true;
3194
        if (canon_value_cmp (node->loc, cval))
3195
          cval = node->loc;
3196
      }
3197
 
3198
  if (!has_value)
3199
    return 1;
3200
 
3201
  if (cval == val)
3202
    {
3203
      if (!has_marks || dv_is_decl_p (dv))
3204
        return 1;
3205
 
3206
      /* Keep it marked so that we revisit it, either after visiting a
3207
         child node, or after visiting a new parent that might be
3208
         found out.  */
3209
      VALUE_RECURSED_INTO (val) = true;
3210
 
3211
      for (node = var->var_part[0].loc_chain; node; node = node->next)
3212
        if (GET_CODE (node->loc) == VALUE
3213
            && VALUE_RECURSED_INTO (node->loc))
3214
          {
3215
            cval = node->loc;
3216
          restart_with_cval:
3217
            VALUE_RECURSED_INTO (cval) = false;
3218
            dv = dv_from_value (cval);
3219
            slot = shared_hash_find_slot_noinsert (set->vars, dv);
3220
            if (!slot)
3221
              {
3222
                gcc_assert (dv_is_decl_p (var->dv));
3223
                /* The canonical value was reset and dropped.
3224
                   Remove it.  */
3225
                clobber_variable_part (set, NULL, var->dv, 0, NULL);
3226
                return 1;
3227
              }
3228
            var = (variable)*slot;
3229
            gcc_assert (dv_is_value_p (var->dv));
3230
            if (var->n_var_parts == 0)
3231
              return 1;
3232
            gcc_assert (var->n_var_parts == 1);
3233
            goto restart;
3234
          }
3235
 
3236
      VALUE_RECURSED_INTO (val) = false;
3237
 
3238
      return 1;
3239
    }
3240
 
3241
  /* Push values to the canonical one.  */
3242
  cdv = dv_from_value (cval);
3243
  cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3244
 
3245
  for (node = var->var_part[0].loc_chain; node; node = node->next)
3246
    if (node->loc != cval)
3247
      {
3248
        cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3249
                               node->init, NULL_RTX);
3250
        if (GET_CODE (node->loc) == VALUE)
3251
          {
3252
            decl_or_value ndv = dv_from_value (node->loc);
3253
 
3254
            set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3255
                               NO_INSERT);
3256
 
3257
            if (canon_value_cmp (node->loc, val))
3258
              {
3259
                /* If it could have been a local minimum, it's not any more,
3260
                   since it's now a neighbor of cval, so it may have to push
3261
                   to it.  Conversely, if it wouldn't have prevailed over
3262
                   val, then whatever mark it has is fine: if it was to
3263
                   push, it will now push to a more canonical node, but if
3264
                   it wasn't, then it has already pushed any values it might
3265
                   have to.  */
3266
                VALUE_RECURSED_INTO (node->loc) = true;
3267
                /* Make sure we visit node->loc by ensuring that cval is
3268
                   visited too.  */
3269
                VALUE_RECURSED_INTO (cval) = true;
3270
              }
3271
            else if (!VALUE_RECURSED_INTO (node->loc))
3272
              /* If we have no need to "recurse" into this node, it's
3273
                 already "canonicalized", so drop the link to the old
3274
                 parent.  */
3275
              clobber_variable_part (set, cval, ndv, 0, NULL);
3276
          }
3277
        else if (GET_CODE (node->loc) == REG)
3278
          {
3279
            attrs list = set->regs[REGNO (node->loc)], *listp;
3280
 
3281
            /* Change an existing attribute referring to dv so that it
3282
               refers to cdv, removing any duplicate this might
3283
               introduce, and checking that no previous duplicates
3284
               existed, all in a single pass.  */
3285
 
3286
            while (list)
3287
              {
3288
                if (list->offset == 0
3289
                    && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3290
                        || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3291
                  break;
3292
 
3293
                list = list->next;
3294
              }
3295
 
3296
            gcc_assert (list);
3297
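            /* The entry found still refers to DV: retarget it to CDV and
               drop any later duplicate that already refers to CDV.  */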
            if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3298
              {
3299
                list->dv = cdv;
3300
                for (listp = &list->next; (list = *listp); listp = &list->next)
3301
                  {
3302
                    if (list->offset)
3303
                      continue;
3304
 
3305
                    if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3306
                      {
3307
                        *listp = list->next;
3308
                        pool_free (attrs_pool, list);
3309
                        list = *listp;
3310
                        break;
3311
                      }
3312
 
3313
                    gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3314
                  }
3315
              }
3316
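            /* The entry found already refers to CDV: drop any later
               entry that still refers to DV.  */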
            else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3317
              {
3318
                for (listp = &list->next; (list = *listp); listp = &list->next)
3319
                  {
3320
                    if (list->offset)
3321
                      continue;
3322
 
3323
                    if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3324
                      {
3325
                        *listp = list->next;
3326
                        pool_free (attrs_pool, list);
3327
                        list = *listp;
3328
                        break;
3329
                      }
3330
 
3331
                    gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3332
                  }
3333
              }
3334
            else
3335
              gcc_unreachable ();
3336
 
3337
#if ENABLE_CHECKING
3338
            while (list)
3339
              {
3340
                if (list->offset == 0
3341
                    && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3342
                        || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3343
                  gcc_unreachable ();
3344
 
3345
                list = list->next;
3346
              }
3347
#endif
3348
          }
3349
      }
3350
 
3351
  if (val)
3352
    cslot = set_slot_part (set, val, cslot, cdv, 0,
3353
                           VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3354
 
3355
  slot = clobber_slot_part (set, cval, slot, 0, NULL);
3356
 
3357
  /* Variable may have been unshared.  */
3358
  var = (variable)*slot;
3359
  gcc_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3360
              && var->var_part[0].loc_chain->next == NULL);
3361
 
3362
  if (VALUE_RECURSED_INTO (cval))
3363
    goto restart_with_cval;
3364
 
3365
  return 1;
3366
}
3367
 
3368
/* Bind one-part variables to the canonical value in an equivalence
3369
   set.  Not doing this causes dataflow convergence failure in rare
3370
   circumstances, see PR42873.  Unfortunately we can't do this
3371
   efficiently as part of canonicalize_values_star, since we may not
3372
   have determined or even seen the canonical value of a set when we
3373
   get to a variable that references another member of the set.  */
3374
 
3375
static int
3376
canonicalize_vars_star (void **slot, void *data)
3377
{
3378
  dataflow_set *set = (dataflow_set *)data;
3379
  variable var = (variable) *slot;
3380
  decl_or_value dv = var->dv;
3381
  location_chain node;
3382
  rtx cval;
3383
  decl_or_value cdv;
3384
  void **cslot;
3385
  variable cvar;
3386
  location_chain cnode;
3387
 
3388
  if (!dv_onepart_p (dv) || dv_is_value_p (dv))
3389
    return 1;
3390
 
3391
  gcc_assert (var->n_var_parts == 1);
3392
 
3393
  node = var->var_part[0].loc_chain;
3394
 
3395
  if (GET_CODE (node->loc) != VALUE)
3396
    return 1;
3397
 
3398
  gcc_assert (!node->next);
3399
  cval = node->loc;
3400
 
3401
  /* Push values to the canonical one.  */
3402
  cdv = dv_from_value (cval);
3403
  cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3404
  if (!cslot)
3405
    return 1;
3406
  cvar = (variable)*cslot;
3407
  gcc_assert (cvar->n_var_parts == 1);
3408
 
3409
  cnode = cvar->var_part[0].loc_chain;
3410
 
3411
  /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3412
     that are not "more canonical" than it.  */
3413
  if (GET_CODE (cnode->loc) != VALUE
3414
      || !canon_value_cmp (cnode->loc, cval))
3415
    return 1;
3416
 
3417
  /* CVAL was found to be non-canonical.  Change the variable to point
3418
     to the canonical VALUE.  */
3419
  gcc_assert (!cnode->next);
3420
  cval = cnode->loc;
3421
 
3422
  slot = set_slot_part (set, cval, slot, dv, 0,
3423
                        node->init, node->set_src);
3424
  slot = clobber_slot_part (set, cval, slot, 0, node->set_src);
3425
 
3426
  return 1;
3427
}
3428
 
3429
/* Combine variable or value S1VAR (in DSM->cur) with the
3430
   corresponding entry in DSM->src.  Multi-part variables are combined
3431
   with variable_union, whereas onepart dvs are combined with
3432
   intersection.  */
3433
 
3434
static int
3435
variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3436
{
3437
  dataflow_set *dst = dsm->dst;
3438
  void **dstslot;
3439
  variable s2var, dvar = NULL;
3440
  decl_or_value dv = s1var->dv;
3441
  bool onepart = dv_onepart_p (dv);
3442
  rtx val;
3443
  hashval_t dvhash;
3444
  location_chain node, *nodep;
3445
 
3446
  /* If the incoming onepart variable has an empty location list, then
3447
     the intersection will be just as empty.  For other variables,
3448
     it's always union.  */
3449
  gcc_assert (s1var->n_var_parts
3450
              && s1var->var_part[0].loc_chain);
3451
 
3452
  if (!onepart)
3453
    return variable_union (s1var, dst);
3454
 
3455
  gcc_assert (s1var->n_var_parts == 1
3456
              && s1var->var_part[0].offset == 0);
3457
 
3458
  dvhash = dv_htab_hash (dv);
3459
  if (dv_is_value_p (dv))
3460
    val = dv_as_value (dv);
3461
  else
3462
    val = NULL;
3463
 
3464
  s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
3465
  if (!s2var)
3466
    {
3467
      dst_can_be_shared = false;
3468
      return 1;
3469
    }
3470
 
3471
  dsm->src_onepart_cnt--;
3472
  gcc_assert (s2var->var_part[0].loc_chain
3473
              && s2var->n_var_parts == 1
3474
              && s2var->var_part[0].offset == 0);
3475
 
3476
  dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3477
  if (dstslot)
3478
    {
3479
      dvar = (variable)*dstslot;
3480
      gcc_assert (dvar->refcount == 1
3481
                  && dvar->n_var_parts == 1
3482
                  && dvar->var_part[0].offset == 0);
3483
      nodep = &dvar->var_part[0].loc_chain;
3484
    }
3485
  else
3486
    {
3487
      nodep = &node;
3488
      node = NULL;
3489
    }
3490
 
3491
  if (!dstslot && !onepart_variable_different_p (s1var, s2var))
3492
    {
3493
      dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
3494
                                                 dvhash, INSERT);
3495
      *dstslot = dvar = s2var;
3496
      dvar->refcount++;
3497
    }
3498
  else
3499
    {
3500
      dst_can_be_shared = false;
3501
 
3502
      intersect_loc_chains (val, nodep, dsm,
3503
                            s1var->var_part[0].loc_chain, s2var);
3504
 
3505
      if (!dstslot)
3506
        {
3507
          if (node)
3508
            {
3509
              dvar = (variable) pool_alloc (dv_pool (dv));
3510
              dvar->dv = dv;
3511
              dvar->refcount = 1;
3512
              dvar->n_var_parts = 1;
3513
              dvar->cur_loc_changed = false;
3514
              dvar->in_changed_variables = false;
3515
              dvar->var_part[0].offset = 0;
3516
              dvar->var_part[0].loc_chain = node;
3517
              dvar->var_part[0].cur_loc = NULL;
3518
 
3519
              dstslot
3520
                = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
3521
                                                   INSERT);
3522
              gcc_assert (!*dstslot);
3523
              *dstslot = dvar;
3524
            }
3525
          else
3526
            return 1;
3527
        }
3528
    }
3529
 
3530
  nodep = &dvar->var_part[0].loc_chain;
3531
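  /* Registers come first in the canonically ordered location list.
     For each REG location: if no VALUE is yet associated with the
     register in this mode, associate the register with DV; if the
     register is already bound to a different VALUE, add that VALUE as
     a location of DV and drop the REG from DV's list.  */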
  while ((node = *nodep))
3532
    {
3533
      location_chain *nextp = &node->next;
3534
 
3535
      if (GET_CODE (node->loc) == REG)
3536
        {
3537
          attrs list;
3538
 
3539
          for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
3540
            if (GET_MODE (node->loc) == GET_MODE (list->loc)
3541
                && dv_is_value_p (list->dv))
3542
              break;
3543
 
3544
          if (!list)
3545
            attrs_list_insert (&dst->regs[REGNO (node->loc)],
3546
                               dv, 0, node->loc);
3547
          /* If this value became canonical for another value that had
3548
             this register, we want to leave it alone.  */
3549
          else if (dv_as_value (list->dv) != val)
3550
            {
3551
              dstslot = set_slot_part (dst, dv_as_value (list->dv),
3552
                                       dstslot, dv, 0,
3553
                                       node->init, NULL_RTX);
3554
              dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
3555
 
3556
              /* Since nextp points into the removed node, we can't
3557
                 use it.  The pointer to the next node moved to nodep.
3558
                 However, if the variable we're walking is unshared
3559
                 during our walk, we'll keep walking the location list
3560
                 of the previously-shared variable, in which case the
3561
                 node won't have been removed, and we'll want to skip
3562
                 it.  That's why we test *nodep here.  */
3563
              if (*nodep != node)
3564
                nextp = nodep;
3565
            }
3566
        }
3567
      else
3568
        /* Canonicalization puts registers first, so we don't have to
3569
           walk it all.  */
3570
        break;
3571
      nodep = nextp;
3572
    }
3573
 
3574
  if (dvar != (variable)*dstslot)
3575
    dvar = (variable)*dstslot;
3576
  nodep = &dvar->var_part[0].loc_chain;
3577
 
3578
  if (val)
3579
    {
3580
      /* Mark all referenced nodes for canonicalization, and make sure
3581
         we have mutual equivalence links.  */
3582
      VALUE_RECURSED_INTO (val) = true;
3583
      for (node = *nodep; node; node = node->next)
3584
        if (GET_CODE (node->loc) == VALUE)
3585
          {
3586
            VALUE_RECURSED_INTO (node->loc) = true;
3587
            set_variable_part (dst, val, dv_from_value (node->loc), 0,
3588
                               node->init, NULL, INSERT);
3589
          }
3590
 
3591
      dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3592
      gcc_assert (*dstslot == dvar);
3593
      canonicalize_values_star (dstslot, dst);
3594
#ifdef ENABLE_CHECKING
3595
      gcc_assert (dstslot
3596
                  == shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash));
3597
#endif
3598
      dvar = (variable)*dstslot;
3599
    }
3600
  else
3601
    {
3602
      bool has_value = false, has_other = false;
3603
 
3604
      /* If we have one value and anything else, we're going to
3605
         canonicalize this, so make sure all values have an entry in
3606
         the table and are marked for canonicalization.  */
3607
      for (node = *nodep; node; node = node->next)
3608
        {
3609
          if (GET_CODE (node->loc) == VALUE)
3610
            {
3611
              /* If this was marked during register canonicalization,
3612
                 we know we have to canonicalize values.  */
3613
              if (has_value)
3614
                has_other = true;
3615
              has_value = true;
3616
              if (has_other)
3617
                break;
3618
            }
3619
          else
3620
            {
3621
              has_other = true;
3622
              if (has_value)
3623
                break;
3624
            }
3625
        }
3626
 
3627
      if (has_value && has_other)
3628
        {
3629
          for (node = *nodep; node; node = node->next)
3630
            {
3631
              if (GET_CODE (node->loc) == VALUE)
3632
                {
3633
                  decl_or_value dv = dv_from_value (node->loc);
3634
                  void **slot = NULL;
3635
 
3636
                  if (shared_hash_shared (dst->vars))
3637
                    slot = shared_hash_find_slot_noinsert (dst->vars, dv);
3638
                  if (!slot)
3639
                    slot = shared_hash_find_slot_unshare (&dst->vars, dv,
3640
                                                          INSERT);
3641
                  if (!*slot)
3642
                    {
3643
                      variable var = (variable) pool_alloc (dv_pool (dv));
3644
                      var->dv = dv;
3645
                      var->refcount = 1;
3646
                      var->n_var_parts = 1;
3647
                      var->cur_loc_changed = false;
3648
                      var->in_changed_variables = false;
3649
                      var->var_part[0].offset = 0;
3650
                      var->var_part[0].loc_chain = NULL;
3651
                      var->var_part[0].cur_loc = NULL;
3652
                      *slot = var;
3653
                    }
3654
 
3655
                  VALUE_RECURSED_INTO (node->loc) = true;
3656
                }
3657
            }
3658
 
3659
          dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
3660
          gcc_assert (*dstslot == dvar);
3661
          canonicalize_values_star (dstslot, dst);
3662
#ifdef ENABLE_CHECKING
3663
          gcc_assert (dstslot
3664
                      == shared_hash_find_slot_noinsert_1 (dst->vars,
3665
                                                           dv, dvhash));
3666
#endif
3667
          dvar = (variable)*dstslot;
3668
        }
3669
    }
3670
 
3671
  if (!onepart_variable_different_p (dvar, s2var))
3672
    {
3673
      variable_htab_free (dvar);
3674
      *dstslot = dvar = s2var;
3675
      dvar->refcount++;
3676
    }
3677
  else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
3678
    {
3679
      variable_htab_free (dvar);
3680
      *dstslot = dvar = s1var;
3681
      dvar->refcount++;
3682
      dst_can_be_shared = false;
3683
    }
3684
  else
3685
    dst_can_be_shared = false;
3686
 
3687
  return 1;
3688
}
3689
 
3690
/* Copy S2VAR (in DSM->src) to DSM->dst if the variable is a
3691
   multi-part variable.  Unions of multi-part variables and
3692
   intersections of one-part ones will be handled in
3693
   variable_merge_over_cur().  */
3694
 
3695
static int
3696
variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
3697
{
3698
  dataflow_set *dst = dsm->dst;
3699
  decl_or_value dv = s2var->dv;
3700
  bool onepart = dv_onepart_p (dv);
3701
 
3702
  if (!onepart)
3703
    {
3704
      void **dstp = shared_hash_find_slot (dst->vars, dv);
3705
      *dstp = s2var;
3706
      s2var->refcount++;
3707
      return 1;
3708
    }
3709
 
3710
  dsm->src_onepart_cnt++;
3711
  return 1;
3712
}
3713
 
3714
/* Combine dataflow set information from SRC2 into DST, merging it
3715
   with the information DST held on entry.  */
3716
 
3717
static void
3718
dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
3719
{
3720
  dataflow_set cur = *dst;
3721
  dataflow_set *src1 = &cur;
3722
  struct dfset_merge dsm;
3723
  int i;
3724
  size_t src1_elems, src2_elems;
3725
  htab_iterator hi;
3726
  variable var;
3727
 
3728
  src1_elems = htab_elements (shared_hash_htab (src1->vars));
3729
  src2_elems = htab_elements (shared_hash_htab (src2->vars));
3730
  dataflow_set_init (dst);
3731
  dst->stack_adjust = cur.stack_adjust;
3732
  shared_hash_destroy (dst->vars);
3733
  dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
3734
  dst->vars->refcount = 1;
3735
  dst->vars->htab
3736
    = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash,
3737
                   variable_htab_eq, variable_htab_free);
3738
 
3739
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3740
    attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
3741
 
3742
  dsm.dst = dst;
3743
  dsm.src = src2;
3744
  dsm.cur = src1;
3745
  dsm.src_onepart_cnt = 0;
3746
 
3747
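  /* First bring over variables from SRC2, then merge in the previous
     contents of DST (now CUR): multi-part variables are unioned,
     one-part variables are intersected.  */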
  FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.src->vars), var, variable, hi)
3748
    variable_merge_over_src (var, &dsm);
3749
  FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.cur->vars), var, variable, hi)
3750
    variable_merge_over_cur (var, &dsm);
3751
 
3752
  if (dsm.src_onepart_cnt)
3753
    dst_can_be_shared = false;
3754
 
3755
  dataflow_set_destroy (src1);
3756
}
3757
 
3758
/* Mark register equivalences.  */
3759
 
3760
static void
3761
dataflow_set_equiv_regs (dataflow_set *set)
3762
{
3763
  int i;
3764
  attrs list, *listp;
3765
 
3766
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3767
    {
3768
      rtx canon[NUM_MACHINE_MODES];
3769
 
3770
      /* If the list is empty or one entry, no need to canonicalize
3771
         anything.  */
3772
      if (set->regs[i] == NULL || set->regs[i]->next == NULL)
3773
        continue;
3774
 
3775
      memset (canon, 0, sizeof (canon));
3776
 
3777
      for (list = set->regs[i]; list; list = list->next)
3778
        if (list->offset == 0 && dv_is_value_p (list->dv))
3779
          {
3780
            rtx val = dv_as_value (list->dv);
3781
            rtx *cvalp = &canon[(int)GET_MODE (val)];
3782
            rtx cval = *cvalp;
3783
 
3784
            if (canon_value_cmp (val, cval))
3785
              *cvalp = val;
3786
          }
3787
 
3788
      for (list = set->regs[i]; list; list = list->next)
3789
        if (list->offset == 0 && dv_onepart_p (list->dv))
3790
          {
3791
            rtx cval = canon[(int)GET_MODE (list->loc)];
3792
 
3793
            if (!cval)
3794
              continue;
3795
 
3796
            if (dv_is_value_p (list->dv))
3797
              {
3798
                rtx val = dv_as_value (list->dv);
3799
 
3800
                if (val == cval)
3801
                  continue;
3802
 
3803
                VALUE_RECURSED_INTO (val) = true;
3804
                set_variable_part (set, val, dv_from_value (cval), 0,
3805
                                   VAR_INIT_STATUS_INITIALIZED,
3806
                                   NULL, NO_INSERT);
3807
              }
3808
 
3809
            VALUE_RECURSED_INTO (cval) = true;
3810
            set_variable_part (set, cval, list->dv, 0,
3811
                               VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
3812
          }
3813
 
3814
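      /* Finally canonicalize every marked entry; canonicalize_values_star
         may remove entries from this list, so re-check *LISTP before
         advancing.  */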
      for (listp = &set->regs[i]; (list = *listp);
3815
           listp = list ? &list->next : listp)
3816
        if (list->offset == 0 && dv_onepart_p (list->dv))
3817
          {
3818
            rtx cval = canon[(int)GET_MODE (list->loc)];
3819
            void **slot;
3820
 
3821
            if (!cval)
3822
              continue;
3823
 
3824
            if (dv_is_value_p (list->dv))
3825
              {
3826
                rtx val = dv_as_value (list->dv);
3827
                if (!VALUE_RECURSED_INTO (val))
3828
                  continue;
3829
              }
3830
 
3831
            slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
3832
            canonicalize_values_star (slot, set);
3833
            if (*listp != list)
3834
              list = NULL;
3835
          }
3836
    }
3837
}
3838
 
3839
/* Remove any redundant values in the location list of VAR, which must
3840
   be unshared and 1-part.  */
3841
 
3842
static void
3843
remove_duplicate_values (variable var)
3844
{
3845
  location_chain node, *nodep;
3846
 
3847
  gcc_assert (dv_onepart_p (var->dv));
3848
  gcc_assert (var->n_var_parts == 1);
3849
  gcc_assert (var->refcount == 1);
3850
 
3851
  for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
3852
    {
3853
      if (GET_CODE (node->loc) == VALUE)
3854
        {
3855
          if (VALUE_RECURSED_INTO (node->loc))
3856
            {
3857
              /* Remove duplicate value node.  */
3858
              *nodep = node->next;
3859
              pool_free (loc_chain_pool, node);
3860
              continue;
3861
            }
3862
          else
3863
            VALUE_RECURSED_INTO (node->loc) = true;
3864
        }
3865
      nodep = &node->next;
3866
    }
3867
 
3868
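  /* Clear the visited marks set above.  */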
  for (node = var->var_part[0].loc_chain; node; node = node->next)
3869
    if (GET_CODE (node->loc) == VALUE)
3870
      {
3871
        gcc_assert (VALUE_RECURSED_INTO (node->loc));
3872
        VALUE_RECURSED_INTO (node->loc) = false;
3873
      }
3874
}
3875
 
3876
 
3877
/* Hash table iteration argument passed to variable_post_merge.  */
3878
struct dfset_post_merge
3879
{
3880
  /* The new input set for the current block.  */
3881
  dataflow_set *set;
3882
  /* Pointer to the permanent input set for the current block, or
3883
     NULL.  */
3884
  dataflow_set **permp;
3885
};
3886
 
3887
/* Create values for incoming expressions associated with one-part
3888
   variables that don't have value numbers for them.  */
3889
 
3890
static int
3891
variable_post_merge_new_vals (void **slot, void *info)
3892
{
3893
  struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
3894
  dataflow_set *set = dfpm->set;
3895
  variable var = (variable)*slot;
3896
  location_chain node;
3897
 
3898
  if (!dv_onepart_p (var->dv) || !var->n_var_parts)
3899
    return 1;
3900
 
3901
  gcc_assert (var->n_var_parts == 1);
3902
 
3903
  if (dv_is_decl_p (var->dv))
3904
    {
3905
      bool check_dupes = false;
3906
 
3907
    restart:
3908
      for (node = var->var_part[0].loc_chain; node; node = node->next)
3909
        {
3910
          if (GET_CODE (node->loc) == VALUE)
3911
            gcc_assert (!VALUE_RECURSED_INTO (node->loc));
3912
          else if (GET_CODE (node->loc) == REG)
3913
            {
3914
              attrs att, *attp, *curp = NULL;
3915
 
3916
              if (var->refcount != 1)
3917
                {
3918
                  slot = unshare_variable (set, slot, var,
3919
                                           VAR_INIT_STATUS_INITIALIZED);
3920
                  var = (variable)*slot;
3921
                  goto restart;
3922
                }
3923
 
3924
              for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
3925
                   attp = &att->next)
3926
                if (att->offset == 0
3927
                    && GET_MODE (att->loc) == GET_MODE (node->loc))
3928
                  {
3929
                    if (dv_is_value_p (att->dv))
3930
                      {
3931
                        rtx cval = dv_as_value (att->dv);
3932
                        node->loc = cval;
3933
                        check_dupes = true;
3934
                        break;
3935
                      }
3936
                    else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
3937
                      curp = attp;
3938
                  }
3939
 
3940
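              /* If we did not run across the decl's own attribute entry
                 above, find it now; it is removed below, once the
                 register has been mapped to a VALUE.  */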
              if (!curp)
3941
                {
3942
                  curp = attp;
3943
                  while (*curp)
3944
                    if ((*curp)->offset == 0
3945
                        && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
3946
                        && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
3947
                      break;
3948
                    else
3949
                      curp = &(*curp)->next;
3950
                  gcc_assert (*curp);
3951
                }
3952
 
3953
              if (!att)
3954
                {
3955
                  decl_or_value cdv;
3956
                  rtx cval;
3957
 
3958
                  if (!*dfpm->permp)
3959
                    {
3960
                      *dfpm->permp = XNEW (dataflow_set);
3961
                      dataflow_set_init (*dfpm->permp);
3962
                    }
3963
 
3964
                  for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
3965
                       att; att = att->next)
3966
                    if (GET_MODE (att->loc) == GET_MODE (node->loc))
3967
                      {
3968
                        gcc_assert (att->offset == 0
3969
                                    && dv_is_value_p (att->dv));
3970
                        val_reset (set, att->dv);
3971
                        break;
3972
                      }
3973
 
3974
                  if (att)
3975
                    {
3976
                      cdv = att->dv;
3977
                      cval = dv_as_value (cdv);
3978
                    }
3979
                  else
3980
                    {
3981
                      /* Create a unique value to hold this register,
3982
                         that ought to be found and reused in
3983
                         subsequent rounds.  */
3984
                      cselib_val *v;
3985
                      gcc_assert (!cselib_lookup (node->loc,
3986
                                                  GET_MODE (node->loc), 0));
3987
                      v = cselib_lookup (node->loc, GET_MODE (node->loc), 1);
3988
                      cselib_preserve_value (v);
3989
                      cselib_invalidate_rtx (node->loc);
3990
                      cval = v->val_rtx;
3991
                      cdv = dv_from_value (cval);
3992
                      if (dump_file)
3993
                        fprintf (dump_file,
3994
                                 "Created new value %u:%u for reg %i\n",
3995
                                 v->uid, v->hash, REGNO (node->loc));
3996
                    }
3997
 
3998
                  var_reg_decl_set (*dfpm->permp, node->loc,
3999
                                    VAR_INIT_STATUS_INITIALIZED,
4000
                                    cdv, 0, NULL, INSERT);
4001
 
4002
                  node->loc = cval;
4003
                  check_dupes = true;
4004
                }
4005
 
4006
              /* Remove attribute referring to the decl, which now
4007
                 uses the value for the register, already existing or
4008
                 to be added when we bring perm in.  */
4009
              att = *curp;
4010
              *curp = att->next;
4011
              pool_free (attrs_pool, att);
4012
            }
4013
        }
4014
 
4015
      if (check_dupes)
4016
        remove_duplicate_values (var);
4017
    }
4018
 
4019
  return 1;
4020
}
4021
 
4022
/* Reset values in the permanent set that are not associated with the
4023
   chosen expression.  */
4024
 
4025
static int
4026
variable_post_merge_perm_vals (void **pslot, void *info)
4027
{
4028
  struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info;
4029
  dataflow_set *set = dfpm->set;
4030
  variable pvar = (variable)*pslot, var;
4031
  location_chain pnode;
4032
  decl_or_value dv;
4033
  attrs att;
4034
 
4035
  gcc_assert (dv_is_value_p (pvar->dv)
4036
              && pvar->n_var_parts == 1);
4037
  pnode = pvar->var_part[0].loc_chain;
4038
  gcc_assert (pnode
4039
              && !pnode->next
4040
              && REG_P (pnode->loc));
4041
 
4042
  dv = pvar->dv;
4043
 
4044
  var = shared_hash_find (set->vars, dv);
4045
  if (var)
4046
    {
4047
      /* Although variable_post_merge_new_vals may have made decls
4048
         non-star-canonical, values that pre-existed in canonical form
4049
         remain canonical, and newly-created values reference a single
4050
         REG, so they are canonical as well.  Since VAR has the
4051
         location list for a VALUE, using find_loc_in_1pdv for it is
4052
         fine, since VALUEs don't map back to DECLs.  */
4053
      if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4054
        return 1;
4055
      val_reset (set, dv);
4056
    }
4057
 
4058
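  /* Look for a VALUE already associated with PNODE's register in the
     current set.  */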
  for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4059
    if (att->offset == 0
4060
        && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4061
        && dv_is_value_p (att->dv))
4062
      break;
4063
 
4064
  /* If there is a value associated with this register already, create
4065
     an equivalence.  */
4066
  if (att && dv_as_value (att->dv) != dv_as_value (dv))
4067
    {
4068
      rtx cval = dv_as_value (att->dv);
4069
      set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4070
      set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4071
                         NULL, INSERT);
4072
    }
4073
  else if (!att)
4074
    {
4075
      attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4076
                         dv, 0, pnode->loc);
4077
      variable_union (pvar, set);
4078
    }
4079
 
4080
  return 1;
4081
}
4082
 
4083
/* After merging, create VALUEs for incoming expressions, bring in
4084
   permanent-set equivalences, and canonicalize values and one-part
   variables in SET.  */
4085
 
4086
static void
4087
dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4088
{
4089
  struct dfset_post_merge dfpm;
4090
 
4091
  dfpm.set = set;
4092
  dfpm.permp = permp;
4093
 
4094
  htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals,
4095
                 &dfpm);
4096
  if (*permp)
4097
    htab_traverse (shared_hash_htab ((*permp)->vars),
4098
                   variable_post_merge_perm_vals, &dfpm);
4099
  htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set);
4100
  htab_traverse (shared_hash_htab (set->vars), canonicalize_vars_star, set);
4101
}
4102
 
4103
/* Return a node whose loc is a MEM that refers to EXPR in the
4104
   location list of a one-part variable or value VAL, or in that of
4105
   any values recursively mentioned in the location lists.  */
4106
 
4107
static location_chain
4108
find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars)
4109
{
4110
  location_chain node;
4111
  decl_or_value dv;
4112
  variable var;
4113
  location_chain where = NULL;
4114
 
4115
  if (!val)
4116
    return NULL;
4117
 
4118
  gcc_assert (GET_CODE (val) == VALUE
4119
              && !VALUE_RECURSED_INTO (val));
4120
 
4121
  dv = dv_from_value (val);
4122
  var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv));
4123
 
4124
  if (!var)
4125
    return NULL;
4126
 
4127
  gcc_assert (dv_onepart_p (var->dv));
4128
 
4129
  if (!var->n_var_parts)
4130
    return NULL;
4131
 
4132
  gcc_assert (var->var_part[0].offset == 0);
4133
 
4134
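  /* Use VALUE_RECURSED_INTO as a visited mark so that recursion through
     mutually-referencing VALUEs terminates.  */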
  VALUE_RECURSED_INTO (val) = true;
4135
 
4136
  for (node = var->var_part[0].loc_chain; node; node = node->next)
4137
    if (MEM_P (node->loc) && MEM_EXPR (node->loc) == expr
4138
        && MEM_OFFSET (node->loc) == 0)
4139
      {
4140
        where = node;
4141
        break;
4142
      }
4143
    else if (GET_CODE (node->loc) == VALUE
4144
             && !VALUE_RECURSED_INTO (node->loc)
4145
             && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4146
      break;
4147
 
4148
  VALUE_RECURSED_INTO (val) = false;
4149
 
4150
  return where;
4151
}
4152
 
4153
/* Return TRUE if the value of MEM may vary across a call.  */
4154
 
4155
static bool
4156
mem_dies_at_call (rtx mem)
4157
{
4158
  tree expr = MEM_EXPR (mem);
4159
  tree decl;
4160
 
4161
  if (!expr)
4162
    return true;
4163
 
4164
  decl = get_base_address (expr);
4165
 
4166
  if (!decl)
4167
    return true;
4168
 
4169
  if (!DECL_P (decl))
4170
    return true;
4171
 
4172
  return (may_be_aliased (decl)
4173
          || (!TREE_READONLY (decl) && is_global_var (decl)));
4174
}
4175
 
4176
/* Remove all MEMs from the location list of a hash table entry for a
4177
   one-part variable, except those whose MEM attributes map back to
4178
   the variable itself, directly or within a VALUE.  */
4179
 
4180
static int
4181
dataflow_set_preserve_mem_locs (void **slot, void *data)
4182
{
4183
  dataflow_set *set = (dataflow_set *) data;
4184
  variable var = (variable) *slot;
4185
 
4186
  if (dv_is_decl_p (var->dv) && dv_onepart_p (var->dv))
4187
    {
4188
      tree decl = dv_as_decl (var->dv);
4189
      location_chain loc, *locp;
4190
      bool changed = false;
4191
 
4192
      if (!var->n_var_parts)
4193
        return 1;
4194
 
4195
      gcc_assert (var->n_var_parts == 1);
4196
 
4197
      if (shared_var_p (var, set->vars))
4198
        {
4199
          for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4200
            {
4201
              /* We want to remove dying MEMs that don't refer to
4202
                 DECL.  */
4203
              if (GET_CODE (loc->loc) == MEM
4204
                  && (MEM_EXPR (loc->loc) != decl
4205
                      || MEM_OFFSET (loc->loc))
4206
                  && !mem_dies_at_call (loc->loc))
4207
                break;
4208
              /* We want to move here MEMs that do refer to DECL.  */
4209
              else if (GET_CODE (loc->loc) == VALUE
4210
                       && find_mem_expr_in_1pdv (decl, loc->loc,
4211
                                                 shared_hash_htab (set->vars)))
4212
                break;
4213
            }
4214
 
4215
          if (!loc)
4216
            return 1;
4217
 
4218
          slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4219
          var = (variable)*slot;
4220
          gcc_assert (var->n_var_parts == 1);
4221
        }
4222
 
4223
      for (locp = &var->var_part[0].loc_chain, loc = *locp;
4224
           loc; loc = *locp)
4225
        {
4226
          rtx old_loc = loc->loc;
4227
          if (GET_CODE (old_loc) == VALUE)
4228
            {
4229
              location_chain mem_node
4230
                = find_mem_expr_in_1pdv (decl, loc->loc,
4231
                                         shared_hash_htab (set->vars));
4232
 
4233
              /* ??? This picks up only one out of multiple MEMs that
4234
                 refer to the same variable.  Do we ever need to be
4235
                 concerned about dealing with more than one, or, given
4236
                 that they should all map to the same variable
4237
                 location, their addresses will have been merged and
4238
                 they will be regarded as equivalent?  */
4239
              if (mem_node)
4240
                {
4241
                  loc->loc = mem_node->loc;
4242
                  loc->set_src = mem_node->set_src;
4243
                  loc->init = MIN (loc->init, mem_node->init);
4244
                }
4245
            }
4246
 
4247
          if (GET_CODE (loc->loc) != MEM
4248
              || (MEM_EXPR (loc->loc) == decl
4249
                  && MEM_OFFSET (loc->loc) == 0)
4250
              || !mem_dies_at_call (loc->loc))
4251
            {
4252
              if (old_loc != loc->loc && emit_notes)
4253
                {
4254
                  if (old_loc == var->var_part[0].cur_loc)
4255
                    {
4256
                      changed = true;
4257
                      var->var_part[0].cur_loc = NULL;
4258
                      var->cur_loc_changed = true;
4259
                    }
4260
                  add_value_chains (var->dv, loc->loc);
4261
                  remove_value_chains (var->dv, old_loc);
4262
                }
4263
              locp = &loc->next;
4264
              continue;
4265
            }
4266
 
4267
          if (emit_notes)
4268
            {
4269
              remove_value_chains (var->dv, old_loc);
4270
              if (old_loc == var->var_part[0].cur_loc)
4271
                {
4272
                  changed = true;
4273
                  var->var_part[0].cur_loc = NULL;
4274
                  var->cur_loc_changed = true;
4275
                }
4276
            }
4277
          *locp = loc->next;
4278
          pool_free (loc_chain_pool, loc);
4279
        }
4280
 
4281
      if (!var->var_part[0].loc_chain)
4282
        {
4283
          var->n_var_parts--;
4284
          changed = true;
4285
        }
4286
      if (changed)
4287
        variable_was_changed (var, set);
4288
    }
4289
 
4290
  return 1;
4291
}
4292
 
4293
/* Remove all MEMs from the location list of a hash table entry for a
4294
   value.  */
4295
 
4296
static int
4297
dataflow_set_remove_mem_locs (void **slot, void *data)
4298
{
4299
  dataflow_set *set = (dataflow_set *) data;
4300
  variable var = (variable) *slot;
4301
 
4302
  if (dv_is_value_p (var->dv))
4303
    {
4304
      location_chain loc, *locp;
4305
      bool changed = false;
4306
 
4307
      gcc_assert (var->n_var_parts == 1);
4308
 
4309
      if (shared_var_p (var, set->vars))
4310
        {
4311
          for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4312
            if (GET_CODE (loc->loc) == MEM
4313
                && mem_dies_at_call (loc->loc))
4314
              break;
4315
 
4316
          if (!loc)
4317
            return 1;
4318
 
4319
          slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4320
          var = (variable)*slot;
4321
          gcc_assert (var->n_var_parts == 1);
4322
        }
4323
 
4324
      for (locp = &var->var_part[0].loc_chain, loc = *locp;
4325
           loc; loc = *locp)
4326
        {
4327
          if (GET_CODE (loc->loc) != MEM
4328
              || !mem_dies_at_call (loc->loc))
4329
            {
4330
              locp = &loc->next;
4331
              continue;
4332
            }
4333
 
4334
          if (emit_notes)
4335
            remove_value_chains (var->dv, loc->loc);
4336
          *locp = loc->next;
4337
          /* If we have deleted the location which was last emitted
4338
             we have to emit a new location, so add the variable to the set
4339
             of changed variables.  */
4340
          if (var->var_part[0].cur_loc == loc->loc)
4341
            {
4342
              changed = true;
4343
              var->var_part[0].cur_loc = NULL;
4344
              var->cur_loc_changed = true;
4345
            }
4346
          pool_free (loc_chain_pool, loc);
4347
        }
4348
 
4349
      if (!var->var_part[0].loc_chain)
4350
        {
4351
          var->n_var_parts--;
4352
          changed = true;
4353
        }
4354
      if (changed)
4355
        variable_was_changed (var, set);
4356
    }
4357
 
4358
  return 1;
4359
}
4360
 
4361
/* Remove all variable-location information about call-clobbered
4362
   registers, as well as associations between MEMs and VALUEs.  */
4363
 
4364
static void
4365
dataflow_set_clear_at_call (dataflow_set *set)
4366
{
4367
  int r;
4368
 
4369
  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
4370
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, r))
4371
      var_regno_delete (set, r);
4372
 
4373
  if (MAY_HAVE_DEBUG_INSNS)
4374
    {
4375
      set->traversed_vars = set->vars;
4376
      htab_traverse (shared_hash_htab (set->vars),
4377
                     dataflow_set_preserve_mem_locs, set);
4378
      set->traversed_vars = set->vars;
4379
      htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs,
4380
                     set);
4381
      set->traversed_vars = NULL;
4382
    }
4383
}
4384
 
4385
static bool
4386
variable_part_different_p (variable_part *vp1, variable_part *vp2)
4387
{
4388
  location_chain lc1, lc2;
4389
 
4390
  for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4391
    {
4392
      for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4393
        {
4394
          if (REG_P (lc1->loc) && REG_P (lc2->loc))
4395
            {
4396
              if (REGNO (lc1->loc) == REGNO (lc2->loc))
4397
                break;
4398
            }
4399
          if (rtx_equal_p (lc1->loc, lc2->loc))
4400
            break;
4401
        }
4402
      if (!lc2)
4403
        return true;
4404
    }
4405
  return false;
4406
}
4407
 
4408
/* Return true if one-part variables VAR1 and VAR2 are different.
4409
   They must be in canonical order.  */
4410
 
4411
static bool
4412
onepart_variable_different_p (variable var1, variable var2)
4413
{
4414
  location_chain lc1, lc2;
4415
 
4416
  if (var1 == var2)
4417
    return false;
4418
 
4419
  gcc_assert (var1->n_var_parts == 1
4420
              && var2->n_var_parts == 1);
4421
 
4422
  lc1 = var1->var_part[0].loc_chain;
4423
  lc2 = var2->var_part[0].loc_chain;
4424
 
4425
  gcc_assert (lc1 && lc2);
4426
 
4427
  while (lc1 && lc2)
4428
    {
4429
      if (loc_cmp (lc1->loc, lc2->loc))
4430
        return true;
4431
      lc1 = lc1->next;
4432
      lc2 = lc2->next;
4433
    }
4434
 
4435
  return lc1 != lc2;
4436
}
4437
 
4438
/* Return true if variables VAR1 and VAR2 are different.  */
4439
 
4440
static bool
4441
variable_different_p (variable var1, variable var2)
4442
{
4443
  int i;
4444
 
4445
  if (var1 == var2)
4446
    return false;
4447
 
4448
  if (var1->n_var_parts != var2->n_var_parts)
4449
    return true;
4450
 
4451
  for (i = 0; i < var1->n_var_parts; i++)
4452
    {
4453
      if (var1->var_part[i].offset != var2->var_part[i].offset)
4454
        return true;
4455
      /* One-part values have locations in a canonical order.  */
4456
      if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv))
4457
        {
4458
          gcc_assert (var1->n_var_parts == 1
4459
                      && dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv));
4460
          return onepart_variable_different_p (var1, var2);
4461
        }
4462
      if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
4463
        return true;
4464
      if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
4465
        return true;
4466
    }
4467
  return false;
4468
}
4469
 
4470
/* Return true if dataflow sets OLD_SET and NEW_SET differ.  */
4471
 
4472
static bool
4473
dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
4474
{
4475
  htab_iterator hi;
4476
  variable var1;
4477
 
4478
  if (old_set->vars == new_set->vars)
4479
    return false;
4480
 
4481
  if (htab_elements (shared_hash_htab (old_set->vars))
4482
      != htab_elements (shared_hash_htab (new_set->vars)))
4483
    return true;
4484
 
4485
  FOR_EACH_HTAB_ELEMENT (shared_hash_htab (old_set->vars), var1, variable, hi)
4486
    {
4487
      htab_t htab = shared_hash_htab (new_set->vars);
4488
      variable var2 = (variable) htab_find_with_hash (htab, var1->dv,
4489
                                                      dv_htab_hash (var1->dv));
4490
      if (!var2)
4491
        {
4492
          if (dump_file && (dump_flags & TDF_DETAILS))
4493
            {
4494
              fprintf (dump_file, "dataflow difference found: removal of:\n");
4495
              dump_var (var1);
4496
            }
4497
          return true;
4498
        }
4499
 
4500
      if (variable_different_p (var1, var2))
4501
        {
4502
          if (dump_file && (dump_flags & TDF_DETAILS))
4503
            {
4504
              fprintf (dump_file, "dataflow difference found: "
4505
                       "old and new follow:\n");
4506
              dump_var (var1);
4507
              dump_var (var2);
4508
            }
4509
          return true;
4510
        }
4511
    }
4512
 
4513
  /* No need to traverse the second hashtab: if both have the same number
4514
     of elements and every entry of the first one was found in the second,
4515
     then the second can't have any extra entries.  */
4516
  return false;
4517
}
4518
 
4519
/* Free the contents of dataflow set SET.  */
4520
 
4521
static void
4522
dataflow_set_destroy (dataflow_set *set)
4523
{
4524
  int i;
4525
 
4526
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4527
    attrs_list_clear (&set->regs[i]);
4528
 
4529
  shared_hash_destroy (set->vars);
4530
  set->vars = NULL;
4531
}
4532
 
4533
/* Return true if RTL X contains a SYMBOL_REF.  */
4534
 
4535
static bool
4536
contains_symbol_ref (rtx x)
4537
{
4538
  const char *fmt;
4539
  RTX_CODE code;
4540
  int i;
4541
 
4542
  if (!x)
4543
    return false;
4544
 
4545
  code = GET_CODE (x);
4546
  if (code == SYMBOL_REF)
4547
    return true;
4548
 
4549
  fmt = GET_RTX_FORMAT (code);
4550
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4551
    {
4552
      if (fmt[i] == 'e')
4553
        {
4554
          if (contains_symbol_ref (XEXP (x, i)))
4555
            return true;
4556
        }
4557
      else if (fmt[i] == 'E')
4558
        {
4559
          int j;
4560
          for (j = 0; j < XVECLEN (x, i); j++)
4561
            if (contains_symbol_ref (XVECEXP (x, i, j)))
4562
              return true;
4563
        }
4564
    }
4565
 
4566
  return false;
4567
}
4568
 
4569
/* Shall EXPR be tracked?  */
4570
 
4571
static bool
4572
track_expr_p (tree expr, bool need_rtl)
4573
{
4574
  rtx decl_rtl;
4575
  tree realdecl;
4576
 
4577
  if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
4578
    return DECL_RTL_SET_P (expr);
4579
 
4580
  /* If EXPR is not a parameter or a variable, do not track it.  */
4581
  if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
4582
    return 0;
4583
 
4584
  /* It also must have a name...  */
4585
  if (!DECL_NAME (expr) && need_rtl)
4586
    return 0;
4587
 
4588
  /* ... and a RTL assigned to it.  */
4589
  decl_rtl = DECL_RTL_IF_SET (expr);
4590
  if (!decl_rtl && need_rtl)
4591
    return 0;
4592
 
4593
  /* If this expression is really a debug alias of some other declaration, we
4594
     don't need to track this expression if the ultimate declaration is
4595
     ignored.  */
4596
  realdecl = expr;
4597
  if (DECL_DEBUG_EXPR_IS_FROM (realdecl) && DECL_DEBUG_EXPR (realdecl))
4598
    {
4599
      realdecl = DECL_DEBUG_EXPR (realdecl);
4600
      /* ??? We don't yet know how to emit DW_OP_piece for a variable
4601
         that has been SRA'ed.  */
4602
      if (!DECL_P (realdecl))
4603
        return 0;
4604
    }
4605
 
4606
  /* Do not track EXPR if REALDECL should be ignored for debugging
4607
     purposes.  */
4608
  if (DECL_IGNORED_P (realdecl))
4609
    return 0;
4610
 
4611
  /* Do not track global variables until we are able to emit correct location
4612
     list for them.  */
4613
  if (TREE_STATIC (realdecl))
4614
    return 0;
4615
 
4616
  /* When EXPR is a DECL for an alias of some variable (see the example
4617
     below) the TREE_STATIC flag is not used.  Disable tracking of all
4618
     DECLs whose DECL_RTL contains a SYMBOL_REF.
4619
 
4620
     Example:
4621
     extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
4622
     char **_dl_argv;
4623
  */
4624
  if (decl_rtl && MEM_P (decl_rtl)
4625
      && contains_symbol_ref (XEXP (decl_rtl, 0)))
4626
    return 0;
4627
 
4628
  /* If the RTX is a memory, it should not be very large (because it would be
4629
     an array or struct).  */
4630
  if (decl_rtl && MEM_P (decl_rtl))
4631
    {
4632
      /* Do not track structures and arrays.  */
4633
      if (GET_MODE (decl_rtl) == BLKmode
4634
          || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
4635
        return 0;
4636
      if (MEM_SIZE (decl_rtl)
4637
          && INTVAL (MEM_SIZE (decl_rtl)) > MAX_VAR_PARTS)
4638
        return 0;
4639
    }
4640
 
4641
  DECL_CHANGED (expr) = 0;
4642
  DECL_CHANGED (realdecl) = 0;
4643
  return 1;
4644
}
4645
 
4646
/* Determine whether a given LOC refers to the same variable part as
4647
   EXPR+OFFSET.  */
4648
 
4649
static bool
4650
same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
4651
{
4652
  tree expr2;
4653
  HOST_WIDE_INT offset2;
4654
 
4655
  if (! DECL_P (expr))
4656
    return false;
4657
 
4658
  if (REG_P (loc))
4659
    {
4660
      expr2 = REG_EXPR (loc);
4661
      offset2 = REG_OFFSET (loc);
4662
    }
4663
  else if (MEM_P (loc))
4664
    {
4665
      expr2 = MEM_EXPR (loc);
4666
      offset2 = INT_MEM_OFFSET (loc);
4667
    }
4668
  else
4669
    return false;
4670
 
4671
  if (! expr2 || ! DECL_P (expr2))
4672
    return false;
4673
 
4674
  expr = var_debug_decl (expr);
4675
  expr2 = var_debug_decl (expr2);
4676
 
4677
  return (expr == expr2 && offset == offset2);
4678
}
4679
 
4680
/* LOC is a REG or MEM that we would like to track if possible.
4681
   If EXPR is null, we don't know what expression LOC refers to,
4682
   otherwise it refers to EXPR + OFFSET.  STORE_REG_P is true if
4683
   LOC is an lvalue register.
4684
 
4685
   Return true if EXPR is nonnull and if LOC, or some lowpart of it,
4686
   is something we can track.  When returning true, store the mode of
4687
   the lowpart we can track in *MODE_OUT (if nonnull) and its offset
4688
   from EXPR in *OFFSET_OUT (if nonnull).  */
4689
 
4690
static bool
4691
track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
4692
             enum machine_mode *mode_out, HOST_WIDE_INT *offset_out)
4693
{
4694
  enum machine_mode mode;
4695
 
4696
  if (expr == NULL || !track_expr_p (expr, true))
4697
    return false;
4698
 
4699
  /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
4700
     whole subreg, but only the old inner part is really relevant.  */
4701
  mode = GET_MODE (loc);
4702
  if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
4703
    {
4704
      enum machine_mode pseudo_mode;
4705
 
4706
      pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
4707
      if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
4708
        {
4709
          offset += byte_lowpart_offset (pseudo_mode, mode);
4710
          mode = pseudo_mode;
4711
        }
4712
    }
4713
 
4714
  /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
4715
     Do the same if we are storing to a register and EXPR occupies
4716
     the whole of register LOC; in that case, the whole of EXPR is
4717
     being changed.  We exclude complex modes from the second case
4718
     because the real and imaginary parts are represented as separate
4719
     pseudo registers, even if the whole complex value fits into one
4720
     hard register.  */
4721
  if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
4722
       || (store_reg_p
4723
           && !COMPLEX_MODE_P (DECL_MODE (expr))
4724
           && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
4725
      && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
4726
    {
4727
      mode = DECL_MODE (expr);
4728
      offset = 0;
4729
    }
4730
 
4731
  if (offset < 0 || offset >= MAX_VAR_PARTS)
4732
    return false;
4733
 
4734
  if (mode_out)
4735
    *mode_out = mode;
4736
  if (offset_out)
4737
    *offset_out = offset;
4738
  return true;
4739
}
4740
 
4741
/* Return the MODE lowpart of LOC, or null if LOC is not something we
4742
   want to track.  When returning nonnull, make sure that the attributes
4743
   on the returned value are updated.  */
4744
 
4745
static rtx
4746
var_lowpart (enum machine_mode mode, rtx loc)
4747
{
4748
  unsigned int offset, reg_offset, regno;
4749
 
4750
  if (!REG_P (loc) && !MEM_P (loc))
4751
    return NULL;
4752
 
4753
  if (GET_MODE (loc) == mode)
4754
    return loc;
4755
 
4756
  offset = byte_lowpart_offset (mode, GET_MODE (loc));
4757
 
4758
  if (MEM_P (loc))
4759
    return adjust_address_nv (loc, mode, offset);
4760
 
4761
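  /* For registers, compute the register number of the lowpart and keep
     the original REG attributes, adjusted by OFFSET.  */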
  reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
4762
  regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
4763
                                             reg_offset, mode);
4764
  return gen_rtx_REG_offset (loc, mode, regno, offset);
4765
}
4766
 
4767
/* arg_pointer_rtx or frame_pointer_rtx, respectively, if stack_pointer_rtx or
4768
   hard_frame_pointer_rtx is being mapped to it.  */
4769
static rtx cfa_base_rtx;
4770
 
4771
/* Carry information about uses and stores while walking rtx.  */
4772
 
4773
struct count_use_info
4774
{
4775
  /* The insn where the RTX is.  */
4776
  rtx insn;
4777
 
4778
  /* The basic block where insn is.  */
4779
  basic_block bb;
4780
 
4781
  /* The array of n_sets sets in the insn, as determined by cselib.  */
4782
  struct cselib_set *sets;
4783
  int n_sets;
4784
 
4785
  /* True if we're counting stores, false otherwise.  */
4786
  bool store_p;
4787
};
4788
 
4789
/* Find a VALUE corresponding to X.   */
4790
 
4791
static inline cselib_val *
4792
find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
4793
{
4794
  int i;
4795
 
4796
  if (cui->sets)
4797
    {
4798
      /* This is called after uses are set up and before stores are
4799
         processed by cselib, so it's safe to look up srcs, but not
4800
         dsts.  So we look up expressions that appear in srcs or in
4801
         dest expressions, but we search the sets array for dests of
4802
         stores.  */
4803
      if (cui->store_p)
4804
        {
4805
          for (i = 0; i < cui->n_sets; i++)
4806
            if (cui->sets[i].dest == x)
4807
              return cui->sets[i].src_elt;
4808
        }
4809
      else
4810
        return cselib_lookup (x, mode, 0);
4811
    }
4812
 
4813
  return NULL;
4814
}
4815
 
4816
/* Helper function to get mode of MEM's address.  */
4817
 
4818
static inline enum machine_mode
4819
get_address_mode (rtx mem)
4820
{
4821
  enum machine_mode mode = GET_MODE (XEXP (mem, 0));
4822
  if (mode != VOIDmode)
4823
    return mode;
4824
  return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
4825
}
4826
 
4827
/* Replace all registers and addresses in an expression with VALUE
4828
   expressions that map back to them, unless the expression is a
4829
   register.  If no mapping is or can be performed, returns NULL.  */
4830
 
4831
static rtx
4832
replace_expr_with_values (rtx loc)
4833
{
4834
  if (REG_P (loc))
4835
    return NULL;
4836
  else if (MEM_P (loc))
4837
    {
4838
      cselib_val *addr = cselib_lookup (XEXP (loc, 0),
4839
                                        get_address_mode (loc), 0);
4840
      if (addr)
4841
        return replace_equiv_address_nv (loc, addr->val_rtx);
4842
      else
4843
        return NULL;
4844
    }
4845
  else
4846
    return cselib_subst_to_values (loc);
4847
}
4848
 
4849
/* Determine what kind of micro operation to choose for a USE.  Return
4850
   MO_CLOBBER if no micro operation is to be generated.  */
4851
 
4852
static enum micro_operation_type
4853
use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
4854
{
4855
  tree expr;
4856
 
4857
  if (cui && cui->sets)
4858
    {
4859
      if (GET_CODE (loc) == VAR_LOCATION)
4860
        {
4861
          if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
4862
            {
4863
              rtx ploc = PAT_VAR_LOCATION_LOC (loc);
4864
              if (! VAR_LOC_UNKNOWN_P (ploc))
4865
                {
4866
                  cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
4867
 
4868
                  /* ??? flag_float_store and volatile mems are never
4869
                     given values, but we could in theory use them for
4870
                     locations.  */
4871
                  gcc_assert (val || 1);
4872
                }
4873
              return MO_VAL_LOC;
4874
            }
4875
          else
4876
            return MO_CLOBBER;
4877
        }
4878
 
4879
      if (REG_P (loc) || MEM_P (loc))
4880
        {
4881
          if (modep)
4882
            *modep = GET_MODE (loc);
4883
          if (cui->store_p)
4884
            {
4885
              if (REG_P (loc)
4886
                  || (find_use_val (loc, GET_MODE (loc), cui)
4887
                      && cselib_lookup (XEXP (loc, 0),
4888
                                        get_address_mode (loc), 0)))
4889
                return MO_VAL_SET;
4890
            }
4891
          else
4892
            {
4893
              cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
4894
 
4895
              if (val && !cselib_preserved_value_p (val))
4896
                return MO_VAL_USE;
4897
            }
4898
        }
4899
    }
4900
 
4901
  if (REG_P (loc))
4902
    {
4903
      gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
4904
 
4905
      if (loc == cfa_base_rtx)
4906
        return MO_CLOBBER;
4907
      expr = REG_EXPR (loc);
4908
 
4909
      if (!expr)
4910
        return MO_USE_NO_VAR;
4911
      else if (target_for_debug_bind (var_debug_decl (expr)))
4912
        return MO_CLOBBER;
4913
      else if (track_loc_p (loc, expr, REG_OFFSET (loc),
4914
                            false, modep, NULL))
4915
        return MO_USE;
4916
      else
4917
        return MO_USE_NO_VAR;
4918
    }
4919
  else if (MEM_P (loc))
4920
    {
4921
      expr = MEM_EXPR (loc);
4922
 
4923
      if (!expr)
4924
        return MO_CLOBBER;
4925
      else if (target_for_debug_bind (var_debug_decl (expr)))
4926
        return MO_CLOBBER;
4927
      else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
4928
                            false, modep, NULL))
4929
        return MO_USE;
4930
      else
4931
        return MO_CLOBBER;
4932
    }
4933
 
4934
  return MO_CLOBBER;
4935
}
4936
 
4937
/* Log to OUT information about micro-operation MOPT involving X in
4938
   INSN of BB.  */
4939
 
4940
static inline void
4941
log_op_type (rtx x, basic_block bb, rtx insn,
4942
             enum micro_operation_type mopt, FILE *out)
4943
{
4944
  fprintf (out, "bb %i op %i insn %i %s ",
4945
           bb->index, VEC_length (micro_operation, VTI (bb)->mos),
4946
           INSN_UID (insn), micro_operation_type_name[mopt]);
4947
  print_inline_rtx (out, x, 2);
4948
  fputc ('\n', out);
4949
}
4950
 
4951
/* Tell whether the CONCAT used to hold a VALUE and its location
4952
   needs value resolution, i.e., an attempt of mapping the location
4953
   back to other incoming values.  */
4954
#define VAL_NEEDS_RESOLUTION(x) \
4955
  (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
4956
/* Whether the location in the CONCAT is a tracked expression that
4957
   should also be handled like a MO_USE.  */
4958
#define VAL_HOLDS_TRACK_EXPR(x) \
4959
  (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
4960
/* Whether the location in the CONCAT should be handled like a MO_COPY
4961
   as well.  */
4962
#define VAL_EXPR_IS_COPIED(x) \
4963
  (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
4964
/* Whether the location in the CONCAT should be handled like a
4965
   MO_CLOBBER as well.  */
4966
#define VAL_EXPR_IS_CLOBBERED(x) \
4967
  (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
4968
/* Whether the location is a CONCAT of the MO_VAL_SET expression and
4969
   a reverse operation that should be handled afterwards.  */
4970
#define VAL_EXPR_HAS_REVERSE(x) \
4971
  (RTL_FLAG_CHECK1 ("VAL_EXPR_HAS_REVERSE", (x), CONCAT)->return_val)
4972
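
/* A minimal standalone sketch (illustrative only, hypothetical names) of the
   idea behind the macros above: several independent booleans are packed into
   spare bits of one object and accessed through lvalue-style macros.  The
   real macros reuse otherwise-unused RTL flag bits of a CONCAT.  */

struct toy_flags
{
  unsigned needs_resolution : 1;
  unsigned holds_track_expr : 1;
  unsigned expr_is_copied : 1;
  unsigned expr_is_clobbered : 1;
  unsigned expr_has_reverse : 1;
};

#define TOY_NEEDS_RESOLUTION(p) ((p)->needs_resolution)
#define TOY_HOLDS_TRACK_EXPR(p) ((p)->holds_track_expr)
#define TOY_EXPR_IS_COPIED(p) ((p)->expr_is_copied)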
 
4973
/* All preserved VALUEs.  */
4974
static VEC (rtx, heap) *preserved_values;
4975
 
4976
/* Ensure VAL is preserved and remember it in a vector for vt_emit_notes.  */
4977
 
4978
static void
4979
preserve_value (cselib_val *val)
4980
{
4981
  cselib_preserve_value (val);
4982
  VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
4983
}
4984
 
4985
/* Helper function for MO_VAL_LOC handling.  Return non-zero if
4986
   any rtx that is not suitable for CONST use and has not been
4987
   replaced by a VALUE is discovered.  */
4988
 
4989
static int
4990
non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED)
4991
{
4992
  if (*x == NULL_RTX)
4993
    return 0;
4994
 
4995
  switch (GET_CODE (*x))
4996
    {
4997
    case REG:
4998
    case DEBUG_EXPR:
4999
    case PC:
5000
    case SCRATCH:
5001
    case CC0:
5002
    case ASM_INPUT:
5003
    case ASM_OPERANDS:
5004
      return 1;
5005
    case MEM:
5006
      return !MEM_READONLY_P (*x);
5007
    default:
5008
      return 0;
5009
    }
5010
}
5011
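
/* A minimal standalone sketch (illustrative only, hypothetical toy_* names)
   of the for_each_rtx-style protocol used by the callback above: the walker
   calls a predicate on every sub-expression and stops as soon as the
   predicate returns non-zero.  */

struct toy_expr { int code; struct toy_expr *op0, *op1; };

/* Return non-zero if E or any sub-expression of E satisfies BAD.  */
static int
toy_walk (const struct toy_expr *e, int (*bad) (const struct toy_expr *))
{
  if (e == 0)
    return 0;
  if (bad (e))
    return 1;
  return toy_walk (e->op0, bad) || toy_walk (e->op1, bad);
}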
 
5012
/* Add uses (register and memory references) LOC which will be tracked
5013
   to VTI (bb)->mos.  INSN is the instruction which LOC is part of.  */
5014
 
5015
static int
5016
add_uses (rtx *ploc, void *data)
5017
{
5018
  rtx loc = *ploc;
5019
  enum machine_mode mode = VOIDmode;
5020
  struct count_use_info *cui = (struct count_use_info *)data;
5021
  enum micro_operation_type type = use_type (loc, cui, &mode);
5022
 
5023
  if (type != MO_CLOBBER)
5024
    {
5025
      basic_block bb = cui->bb;
5026
      micro_operation mo;
5027
 
5028
      mo.type = type;
5029
      mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5030
      mo.insn = cui->insn;
5031
 
5032
      if (type == MO_VAL_LOC)
5033
        {
5034
          rtx oloc = loc;
5035
          rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5036
          cselib_val *val;
5037
 
5038
          gcc_assert (cui->sets);
5039
 
5040
          if (MEM_P (vloc)
5041
              && !REG_P (XEXP (vloc, 0))
5042
              && !MEM_P (XEXP (vloc, 0))
5043
              && (GET_CODE (XEXP (vloc, 0)) != PLUS
5044
                  || XEXP (XEXP (vloc, 0), 0) != cfa_base_rtx
5045
                  || !CONST_INT_P (XEXP (XEXP (vloc, 0), 1))))
5046
            {
5047
              rtx mloc = vloc;
5048
              enum machine_mode address_mode = get_address_mode (mloc);
5049
              cselib_val *val
5050
                = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
5051
 
5052
              if (val && !cselib_preserved_value_p (val))
5053
                {
5054
                  micro_operation moa;
5055
                  preserve_value (val);
5056
                  mloc = cselib_subst_to_values (XEXP (mloc, 0));
5057
                  moa.type = MO_VAL_USE;
5058
                  moa.insn = cui->insn;
5059
                  moa.u.loc = gen_rtx_CONCAT (address_mode,
5060
                                              val->val_rtx, mloc);
5061
                  if (dump_file && (dump_flags & TDF_DETAILS))
5062
                    log_op_type (moa.u.loc, cui->bb, cui->insn,
5063
                                 moa.type, dump_file);
5064
                  VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5065
                }
5066
            }
5067
 
5068
          if (CONSTANT_P (vloc)
5069
              && (GET_CODE (vloc) != CONST
5070
                  || for_each_rtx (&vloc, non_suitable_const, NULL)))
5071
            /* For constants don't look up any value.  */;
5072
          else if (!VAR_LOC_UNKNOWN_P (vloc)
5073
                   && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5074
            {
5075
              enum machine_mode mode2;
5076
              enum micro_operation_type type2;
5077
              rtx nloc = replace_expr_with_values (vloc);
5078
 
5079
              if (nloc)
5080
                {
5081
                  oloc = shallow_copy_rtx (oloc);
5082
                  PAT_VAR_LOCATION_LOC (oloc) = nloc;
5083
                }
5084
 
5085
              oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5086
 
5087
              type2 = use_type (vloc, 0, &mode2);
5088
 
5089
              gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5090
                          || type2 == MO_CLOBBER);
5091
 
5092
              if (type2 == MO_CLOBBER
5093
                  && !cselib_preserved_value_p (val))
5094
                {
5095
                  VAL_NEEDS_RESOLUTION (oloc) = 1;
5096
                  preserve_value (val);
5097
                }
5098
            }
5099
          else if (!VAR_LOC_UNKNOWN_P (vloc))
5100
            {
5101
              oloc = shallow_copy_rtx (oloc);
5102
              PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5103
            }
5104
 
5105
          mo.u.loc = oloc;
5106
        }
5107
      else if (type == MO_VAL_USE)
5108
        {
5109
          enum machine_mode mode2 = VOIDmode;
5110
          enum micro_operation_type type2;
5111
          cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5112
          rtx vloc, oloc = loc, nloc;
5113
 
5114
          gcc_assert (cui->sets);
5115
 
5116
          if (MEM_P (oloc)
5117
              && !REG_P (XEXP (oloc, 0))
5118
              && !MEM_P (XEXP (oloc, 0))
5119
              && (GET_CODE (XEXP (oloc, 0)) != PLUS
5120
                  || XEXP (XEXP (oloc, 0), 0) != cfa_base_rtx
5121
                  || !CONST_INT_P (XEXP (XEXP (oloc, 0), 1))))
5122
            {
5123
              rtx mloc = oloc;
5124
              enum machine_mode address_mode = get_address_mode (mloc);
5125
              cselib_val *val
5126
                = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
5127
 
5128
              if (val && !cselib_preserved_value_p (val))
5129
                {
5130
                  micro_operation moa;
5131
                  preserve_value (val);
5132
                  mloc = cselib_subst_to_values (XEXP (mloc, 0));
5133
                  moa.type = MO_VAL_USE;
5134
                  moa.insn = cui->insn;
5135
                  moa.u.loc = gen_rtx_CONCAT (address_mode,
5136
                                              val->val_rtx, mloc);
5137
                  if (dump_file && (dump_flags & TDF_DETAILS))
5138
                    log_op_type (moa.u.loc, cui->bb, cui->insn,
5139
                                 moa.type, dump_file);
5140
                  VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5141
                }
5142
            }
5143
 
5144
          type2 = use_type (loc, 0, &mode2);
5145
 
5146
          gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5147
                      || type2 == MO_CLOBBER);
5148
 
5149
          if (type2 == MO_USE)
5150
            vloc = var_lowpart (mode2, loc);
5151
          else
5152
            vloc = oloc;
5153
 
5154
          /* The loc of a MO_VAL_USE may have two forms:
5155
 
5156
             (concat val src): val is at src, a value-based
5157
             representation.
5158
 
5159
             (concat (concat val use) src): same as above, with use as
5160
             the MO_USE tracked value, if it differs from src.
5161
 
5162
          */
5163
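          /* Hypothetical illustration of the two forms: a use of
             (reg:SI 3) whose tracked lowpart is the register itself
             typically yields the first form, (concat VAL (reg:SI 3)),
             while a use of (mem:SI (reg:SI 4)) typically yields the
             second form, because the value-based src replaces the
             address with a VALUE while the tracked use keeps the
             original MEM.  */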
 
5164
          nloc = replace_expr_with_values (loc);
5165
          if (!nloc)
5166
            nloc = oloc;
5167
 
5168
          if (vloc != nloc)
5169
            oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5170
          else
5171
            oloc = val->val_rtx;
5172
 
5173
          mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5174
 
5175
          if (type2 == MO_USE)
5176
            VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5177
          if (!cselib_preserved_value_p (val))
5178
            {
5179
              VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5180
              preserve_value (val);
5181
            }
5182
        }
5183
      else
5184
        gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5185
 
5186
      if (dump_file && (dump_flags & TDF_DETAILS))
5187
        log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5188
      VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5189
    }
5190
 
5191
  return 0;
5192
}
5193
 
5194
/* Helper function for finding all uses of REG/MEM in X in insn INSN.  */
5195
 
5196
static void
5197
add_uses_1 (rtx *x, void *cui)
5198
{
5199
  for_each_rtx (x, add_uses, cui);
5200
}
5201
 
5202
/* Attempt to reverse the EXPR operation in the debug info.  Say, for
5203
   reg1 = reg2 + 6, even when reg2 is no longer live we can still
5204
   express its value as VAL - 6, where VAL stands for reg1's value.  */
5205
 
5206
static rtx
5207
reverse_op (rtx val, const_rtx expr)
5208
{
5209
  rtx src, arg, ret;
5210
  cselib_val *v;
5211
  enum rtx_code code;
5212
 
5213
  if (GET_CODE (expr) != SET)
5214
    return NULL_RTX;
5215
 
5216
  if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5217
    return NULL_RTX;
5218
 
5219
  src = SET_SRC (expr);
5220
  switch (GET_CODE (src))
5221
    {
5222
    case PLUS:
5223
    case MINUS:
5224
    case XOR:
5225
    case NOT:
5226
    case NEG:
5227
    case SIGN_EXTEND:
5228
    case ZERO_EXTEND:
5229
      break;
5230
    default:
5231
      return NULL_RTX;
5232
    }
5233
 
5234
  if (!REG_P (XEXP (src, 0))
5235
      || !SCALAR_INT_MODE_P (GET_MODE (src))
5236
      || XEXP (src, 0) == cfa_base_rtx)
5237
    return NULL_RTX;
5238
 
5239
  v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0);
5240
  if (!v || !cselib_preserved_value_p (v))
5241
    return NULL_RTX;
5242
 
5243
  switch (GET_CODE (src))
5244
    {
5245
    case NOT:
5246
    case NEG:
5247
      if (GET_MODE (v->val_rtx) != GET_MODE (val))
5248
        return NULL_RTX;
5249
      ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5250
      break;
5251
    case SIGN_EXTEND:
5252
    case ZERO_EXTEND:
5253
      ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5254
      break;
5255
    case XOR:
5256
      code = XOR;
5257
      goto binary;
5258
    case PLUS:
5259
      code = MINUS;
5260
      goto binary;
5261
    case MINUS:
5262
      code = PLUS;
5263
      goto binary;
5264
    binary:
5265
      if (GET_MODE (v->val_rtx) != GET_MODE (val))
5266
        return NULL_RTX;
5267
      arg = XEXP (src, 1);
5268
      if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5269
        {
5270
          arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5271
          if (arg == NULL_RTX)
5272
            return NULL_RTX;
5273
          if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5274
            return NULL_RTX;
5275
        }
5276
      ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5277
      if (ret == val)
5278
        /* Ensure ret isn't VALUE itself (which can happen e.g. for
5279
           (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5280
           breaks a lot of routines during var-tracking.  */
5281
        ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5282
      break;
5283
    default:
5284
      gcc_unreachable ();
5285
    }
5286
 
5287
  return gen_rtx_CONCAT (GET_MODE (v->val_rtx), v->val_rtx, ret);
5288
}
5289
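
/* A minimal standalone sketch (illustrative only, hypothetical toy_* names,
   plain integers instead of RTL) of the algebraic identities the reversal
   above relies on: for dst = src OP c with an invertible OP, src can be
   recovered from dst.  */

enum toy_op { TOY_PLUS, TOY_MINUS, TOY_XOR, TOY_NEG, TOY_NOT };

/* Given DST = SRC <op> C (or DST = -SRC / ~SRC), return SRC.  */
static long
toy_reverse (enum toy_op op, long dst, long c)
{
  switch (op)
    {
    case TOY_PLUS:  return dst - c;  /* dst = src + c  =>  src = dst - c  */
    case TOY_MINUS: return dst + c;  /* dst = src - c  =>  src = dst + c  */
    case TOY_XOR:   return dst ^ c;  /* xor is its own inverse            */
    case TOY_NEG:   return -dst;     /* negation is its own inverse       */
    case TOY_NOT:   return ~dst;     /* bitwise not is its own inverse    */
    default:        return dst;
    }
}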
 
5290
/* Add stores (register and memory references) LOC which will be tracked
5291
   to VTI (bb)->mos.  EXPR is the RTL expression containing the store.
5292
   CUIP->insn is the instruction which LOC is part of.  */
5293
 
5294
static void
5295
add_stores (rtx loc, const_rtx expr, void *cuip)
5296
{
5297
  enum machine_mode mode = VOIDmode, mode2;
5298
  struct count_use_info *cui = (struct count_use_info *)cuip;
5299
  basic_block bb = cui->bb;
5300
  micro_operation mo;
5301
  rtx oloc = loc, nloc, src = NULL;
5302
  enum micro_operation_type type = use_type (loc, cui, &mode);
5303
  bool track_p = false;
5304
  cselib_val *v;
5305
  bool resolve, preserve;
5306
  rtx reverse;
5307
 
5308
  if (type == MO_CLOBBER)
5309
    return;
5310
 
5311
  mode2 = mode;
5312
 
5313
  if (REG_P (loc))
5314
    {
5315
      gcc_assert (loc != cfa_base_rtx);
5316
      if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5317
          || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5318
          || GET_CODE (expr) == CLOBBER)
5319
        {
5320
          mo.type = MO_CLOBBER;
5321
          mo.u.loc = loc;
5322
        }
5323
      else
5324
        {
5325
          if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
5326
            src = var_lowpart (mode2, SET_SRC (expr));
5327
          loc = var_lowpart (mode2, loc);
5328
 
5329
          if (src == NULL)
5330
            {
5331
              mo.type = MO_SET;
5332
              mo.u.loc = loc;
5333
            }
5334
          else
5335
            {
5336
              rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5337
              if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5338
                mo.type = MO_COPY;
5339
              else
5340
                mo.type = MO_SET;
5341
              mo.u.loc = xexpr;
5342
            }
5343
        }
5344
      mo.insn = cui->insn;
5345
    }
5346
  else if (MEM_P (loc)
5347
           && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5348
               || cui->sets))
5349
    {
5350
      if (MEM_P (loc) && type == MO_VAL_SET
5351
          && !REG_P (XEXP (loc, 0))
5352
          && !MEM_P (XEXP (loc, 0))
5353
          && (GET_CODE (XEXP (loc, 0)) != PLUS
5354
              || XEXP (XEXP (loc, 0), 0) != cfa_base_rtx
5355
              || !CONST_INT_P (XEXP (XEXP (loc, 0), 1))))
5356
        {
5357
          rtx mloc = loc;
5358
          enum machine_mode address_mode = get_address_mode (mloc);
5359
          cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5360
                                           address_mode, 0);
5361
 
5362
          if (val && !cselib_preserved_value_p (val))
5363
            {
5364
              preserve_value (val);
5365
              mo.type = MO_VAL_USE;
5366
              mloc = cselib_subst_to_values (XEXP (mloc, 0));
5367
              mo.u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
5368
              mo.insn = cui->insn;
5369
              if (dump_file && (dump_flags & TDF_DETAILS))
5370
                log_op_type (mo.u.loc, cui->bb, cui->insn,
5371
                             mo.type, dump_file);
5372
              VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5373
            }
5374
        }
5375
 
5376
      if (GET_CODE (expr) == CLOBBER || !track_p)
5377
        {
5378
          mo.type = MO_CLOBBER;
5379
          mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5380
        }
5381
      else
5382
        {
5383
          if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
5384
            src = var_lowpart (mode2, SET_SRC (expr));
5385
          loc = var_lowpart (mode2, loc);
5386
 
5387
          if (src == NULL)
5388
            {
5389
              mo.type = MO_SET;
5390
              mo.u.loc = loc;
5391
            }
5392
          else
5393
            {
5394
              rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
5395
              if (same_variable_part_p (SET_SRC (xexpr),
5396
                                        MEM_EXPR (loc),
5397
                                        INT_MEM_OFFSET (loc)))
5398
                mo.type = MO_COPY;
5399
              else
5400
                mo.type = MO_SET;
5401
              mo.u.loc = xexpr;
5402
            }
5403
        }
5404
      mo.insn = cui->insn;
5405
    }
5406
  else
5407
    return;
5408
 
5409
  if (type != MO_VAL_SET)
5410
    goto log_and_return;
5411
 
5412
  v = find_use_val (oloc, mode, cui);
5413
 
5414
  if (!v)
5415
    goto log_and_return;
5416
 
5417
  resolve = preserve = !cselib_preserved_value_p (v);
5418
 
5419
  nloc = replace_expr_with_values (oloc);
5420
  if (nloc)
5421
    oloc = nloc;
5422
 
5423
  if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
5424
    {
5425
      cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0);
5426
 
5427
      gcc_assert (oval != v);
5428
      gcc_assert (REG_P (oloc) || MEM_P (oloc));
5429
 
5430
      if (!cselib_preserved_value_p (oval))
5431
        {
5432
          micro_operation moa;
5433
 
5434
          preserve_value (oval);
5435
 
5436
          moa.type = MO_VAL_USE;
5437
          moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
5438
          VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
5439
          moa.insn = cui->insn;
5440
 
5441
          if (dump_file && (dump_flags & TDF_DETAILS))
5442
            log_op_type (moa.u.loc, cui->bb, cui->insn,
5443
                         moa.type, dump_file);
5444
          VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa);
5445
        }
5446
 
5447
      resolve = false;
5448
    }
5449
  else if (resolve && GET_CODE (mo.u.loc) == SET)
5450
    {
5451
      nloc = replace_expr_with_values (SET_SRC (expr));
5452
 
5453
      /* Avoid the mode mismatch between oexpr and expr.  */
5454
      if (!nloc && mode != mode2)
5455
        {
5456
          nloc = SET_SRC (expr);
5457
          gcc_assert (oloc == SET_DEST (expr));
5458
        }
5459
 
5460
      if (nloc)
5461
        oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
5462
      else
5463
        {
5464
          if (oloc == SET_DEST (mo.u.loc))
5465
            /* No point in duplicating.  */
5466
            oloc = mo.u.loc;
5467
          if (!REG_P (SET_SRC (mo.u.loc)))
5468
            resolve = false;
5469
        }
5470
    }
5471
  else if (!resolve)
5472
    {
5473
      if (GET_CODE (mo.u.loc) == SET
5474
          && oloc == SET_DEST (mo.u.loc))
5475
        /* No point in duplicating.  */
5476
        oloc = mo.u.loc;
5477
    }
5478
  else
5479
    resolve = false;
5480
 
5481
  loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
5482
 
5483
  if (mo.u.loc != oloc)
5484
    loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
5485
 
5486
  /* The loc of a MO_VAL_SET may have various forms:
5487
 
5488
     (concat val dst): dst now holds val
5489
 
5490
     (concat val (set dst src)): dst now holds val, copied from src
5491
 
5492
     (concat (concat val dstv) dst): dst now holds val; dstv is dst
5493
     after replacing mems and non-top-level regs with values.
5494
 
5495
     (concat (concat val dstv) (set dst src)): dst now holds val,
5496
     copied from src.  dstv is a value-based representation of dst, if
5497
     it differs from dst.  If resolution is needed, src is a REG, and
5498
     its mode is the same as that of val.
5499
 
5500
     (concat (concat val (set dstv srcv)) (set dst src)): src
5501
     copied to dst, holding val.  dstv and srcv are value-based
5502
     representations of dst and src, respectively.
5503
 
5504
  */
5505
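  /* Hypothetical illustration: a plain register copy such as
     (set (reg:SI 1) (reg:SI 2)) typically produces the second form,
     (concat VAL (set (reg:SI 1) (reg:SI 2))), i.e. reg 1 now holds VAL,
     copied from reg 2; a store through a pointer such as
     (set (mem:SI (reg:SI 3)) (reg:SI 2)) typically produces one of the
     nested forms, because the destination MEM is first rewritten into
     its value-based shape.  */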
 
5506
  if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
5507
    {
5508
      reverse = reverse_op (v->val_rtx, expr);
5509
      if (reverse)
5510
        {
5511
          loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, reverse);
5512
          VAL_EXPR_HAS_REVERSE (loc) = 1;
5513
        }
5514
    }
5515
 
5516
  mo.u.loc = loc;
5517
 
5518
  if (track_p)
5519
    VAL_HOLDS_TRACK_EXPR (loc) = 1;
5520
  if (preserve)
5521
    {
5522
      VAL_NEEDS_RESOLUTION (loc) = resolve;
5523
      preserve_value (v);
5524
    }
5525
  if (mo.type == MO_CLOBBER)
5526
    VAL_EXPR_IS_CLOBBERED (loc) = 1;
5527
  if (mo.type == MO_COPY)
5528
    VAL_EXPR_IS_COPIED (loc) = 1;
5529
 
5530
  mo.type = MO_VAL_SET;
5531
 
5532
 log_and_return:
5533
  if (dump_file && (dump_flags & TDF_DETAILS))
5534
    log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5535
  VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5536
}
5537
 
5538
/* Callback for cselib_record_sets_hook.  Record uses and stores in an
5539
   insn as micro operations, after cselib_record_sets has analyzed the
5540
   sets in the insn but before it modifies the stored values in its
5541
   internal tables.  If cselib_record_sets doesn't call it directly
5542
   (perhaps because we're not doing cselib in the first place), SETS
5543
   and N_SETS will be 0.  */
5544
 
5545
static void
5546
add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
5547
{
5548
  basic_block bb = BLOCK_FOR_INSN (insn);
5549
  int n1, n2;
5550
  struct count_use_info cui;
5551
  micro_operation *mos;
5552
 
5553
  cselib_hook_called = true;
5554
 
5555
  cui.insn = insn;
5556
  cui.bb = bb;
5557
  cui.sets = sets;
5558
  cui.n_sets = n_sets;
5559
 
5560
  n1 = VEC_length (micro_operation, VTI (bb)->mos);
5561
  cui.store_p = false;
5562
  note_uses (&PATTERN (insn), add_uses_1, &cui);
5563
  n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5564
  mos = VEC_address (micro_operation, VTI (bb)->mos);
5565
 
5566
  /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
5567
     MO_VAL_LOC last.  */
5568
  while (n1 < n2)
5569
    {
5570
      while (n1 < n2 && mos[n1].type == MO_USE)
5571
        n1++;
5572
      while (n1 < n2 && mos[n2].type != MO_USE)
5573
        n2--;
5574
      if (n1 < n2)
5575
        {
5576
          micro_operation sw;
5577
 
5578
          sw = mos[n1];
5579
          mos[n1] = mos[n2];
5580
          mos[n2] = sw;
5581
        }
5582
    }
5583
 
5584
  n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5585
  while (n1 < n2)
5586
    {
5587
      while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
5588
        n1++;
5589
      while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
5590
        n2--;
5591
      if (n1 < n2)
5592
        {
5593
          micro_operation sw;
5594
 
5595
          sw = mos[n1];
5596
          mos[n1] = mos[n2];
5597
          mos[n2] = sw;
5598
        }
5599
    }
5600
 
5601
  if (CALL_P (insn))
5602
    {
5603
      micro_operation mo;
5604
 
5605
      mo.type = MO_CALL;
5606
      mo.insn = insn;
5607
      mo.u.loc = NULL_RTX;
5608
 
5609
      if (dump_file && (dump_flags & TDF_DETAILS))
5610
        log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
5611
      VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo);
5612
    }
5613
 
5614
  n1 = VEC_length (micro_operation, VTI (bb)->mos);
5615
  /* This will record NEXT_INSN (insn), such that we can
5616
     insert notes before it without worrying about any
5617
     notes that MO_USEs might emit after the insn.  */
5618
  cui.store_p = true;
5619
  note_stores (PATTERN (insn), add_stores, &cui);
5620
  n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5621
  mos = VEC_address (micro_operation, VTI (bb)->mos);
5622
 
5623
  /* Order the MO_VAL_USEs first (note_stores does nothing
5624
     on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
5625
     insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET.  */
5626
  while (n1 < n2)
5627
    {
5628
      while (n1 < n2 && mos[n1].type == MO_VAL_USE)
5629
        n1++;
5630
      while (n1 < n2 && mos[n2].type != MO_VAL_USE)
5631
        n2--;
5632
      if (n1 < n2)
5633
        {
5634
          micro_operation sw;
5635
 
5636
          sw = mos[n1];
5637
          mos[n1] = mos[n2];
5638
          mos[n2] = sw;
5639
        }
5640
    }
5641
 
5642
  n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
5643
  while (n1 < n2)
5644
    {
5645
      while (n1 < n2 && mos[n1].type == MO_CLOBBER)
5646
        n1++;
5647
      while (n1 < n2 && mos[n2].type != MO_CLOBBER)
5648
        n2--;
5649
      if (n1 < n2)
5650
        {
5651
          micro_operation sw;
5652
 
5653
          sw = mos[n1];
5654
          mos[n1] = mos[n2];
5655
          mos[n2] = sw;
5656
        }
5657
    }
5658
}
5659
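
/* A minimal standalone sketch (illustrative only, hypothetical toy_* names)
   of the reordering loops above: a two-pointer partition that swaps
   elements so that everything matching a predicate ends up before the
   rest, in the spirit of the MO_USE / MO_VAL_USE / MO_CLOBBER passes.  */

static void
toy_partition (int *a, int n, int (*first_p) (int))
{
  int lo = 0, hi = n - 1;

  while (lo < hi)
    {
      while (lo < hi && first_p (a[lo]))
        lo++;
      while (lo < hi && !first_p (a[hi]))
        hi--;
      if (lo < hi)
        {
          int tmp = a[lo];

          a[lo] = a[hi];
          a[hi] = tmp;
        }
    }
}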
 
5660
static enum var_init_status
5661
find_src_status (dataflow_set *in, rtx src)
5662
{
5663
  tree decl = NULL_TREE;
5664
  enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
5665
 
5666
  if (! flag_var_tracking_uninit)
5667
    status = VAR_INIT_STATUS_INITIALIZED;
5668
 
5669
  if (src && REG_P (src))
5670
    decl = var_debug_decl (REG_EXPR (src));
5671
  else if (src && MEM_P (src))
5672
    decl = var_debug_decl (MEM_EXPR (src));
5673
 
5674
  if (src && decl)
5675
    status = get_init_value (in, src, dv_from_decl (decl));
5676
 
5677
  return status;
5678
}
5679
 
5680
/* SRC is the source of an assignment.  Use SET to try to find what
5681
   was ultimately assigned to SRC.  Return that value if known,
5682
   otherwise return NULL_RTX.  */
5683
 
5684
static rtx
5685
find_src_set_src (dataflow_set *set, rtx src)
5686
{
5687
  tree decl = NULL_TREE;   /* The variable being copied around.          */
5688
  rtx set_src = NULL_RTX;  /* The value for "decl" stored in "src".      */
5689
  variable var;
5690
  location_chain nextp;
5691
  int i;
5692
  bool found;
5693
 
5694
  if (src && REG_P (src))
5695
    decl = var_debug_decl (REG_EXPR (src));
5696
  else if (src && MEM_P (src))
5697
    decl = var_debug_decl (MEM_EXPR (src));
5698
 
5699
  if (src && decl)
5700
    {
5701
      decl_or_value dv = dv_from_decl (decl);
5702
 
5703
      var = shared_hash_find (set->vars, dv);
5704
      if (var)
5705
        {
5706
          found = false;
5707
          for (i = 0; i < var->n_var_parts && !found; i++)
5708
            for (nextp = var->var_part[i].loc_chain; nextp && !found;
5709
                 nextp = nextp->next)
5710
              if (rtx_equal_p (nextp->loc, src))
5711
                {
5712
                  set_src = nextp->set_src;
5713
                  found = true;
5714
                }
5715
 
5716
        }
5717
    }
5718
 
5719
  return set_src;
5720
}
5721
 
5722
/* Compute the changes of variable locations in the basic block BB.  */
5723
 
5724
static bool
5725
compute_bb_dataflow (basic_block bb)
5726
{
5727
  unsigned int i;
5728
  micro_operation *mo;
5729
  bool changed;
5730
  dataflow_set old_out;
5731
  dataflow_set *in = &VTI (bb)->in;
5732
  dataflow_set *out = &VTI (bb)->out;
5733
 
5734
  dataflow_set_init (&old_out);
5735
  dataflow_set_copy (&old_out, out);
5736
  dataflow_set_copy (out, in);
5737
 
5738
  for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
5739
    {
5740
      rtx insn = mo->insn;
5741
 
5742
      switch (mo->type)
5743
        {
5744
          case MO_CALL:
5745
            dataflow_set_clear_at_call (out);
5746
            break;
5747
 
5748
          case MO_USE:
5749
            {
5750
              rtx loc = mo->u.loc;
5751
 
5752
              if (REG_P (loc))
5753
                var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5754
              else if (MEM_P (loc))
5755
                var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
5756
            }
5757
            break;
5758
 
5759
          case MO_VAL_LOC:
5760
            {
5761
              rtx loc = mo->u.loc;
5762
              rtx val, vloc;
5763
              tree var;
5764
 
5765
              if (GET_CODE (loc) == CONCAT)
5766
                {
5767
                  val = XEXP (loc, 0);
5768
                  vloc = XEXP (loc, 1);
5769
                }
5770
              else
5771
                {
5772
                  val = NULL_RTX;
5773
                  vloc = loc;
5774
                }
5775
 
5776
              var = PAT_VAR_LOCATION_DECL (vloc);
5777
 
5778
              clobber_variable_part (out, NULL_RTX,
5779
                                     dv_from_decl (var), 0, NULL_RTX);
5780
              if (val)
5781
                {
5782
                  if (VAL_NEEDS_RESOLUTION (loc))
5783
                    val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
5784
                  set_variable_part (out, val, dv_from_decl (var), 0,
5785
                                     VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
5786
                                     INSERT);
5787
                }
5788
              else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
5789
                set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
5790
                                   dv_from_decl (var), 0,
5791
                                   VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
5792
                                   INSERT);
5793
            }
5794
            break;
5795
 
5796
          case MO_VAL_USE:
5797
            {
5798
              rtx loc = mo->u.loc;
5799
              rtx val, vloc, uloc;
5800
 
5801
              vloc = uloc = XEXP (loc, 1);
5802
              val = XEXP (loc, 0);
5803
 
5804
              if (GET_CODE (val) == CONCAT)
5805
                {
5806
                  uloc = XEXP (val, 1);
5807
                  val = XEXP (val, 0);
5808
                }
5809
 
5810
              if (VAL_NEEDS_RESOLUTION (loc))
5811
                val_resolve (out, val, vloc, insn);
5812
              else
5813
                val_store (out, val, uloc, insn, false);
5814
 
5815
              if (VAL_HOLDS_TRACK_EXPR (loc))
5816
                {
5817
                  if (GET_CODE (uloc) == REG)
5818
                    var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5819
                                 NULL);
5820
                  else if (GET_CODE (uloc) == MEM)
5821
                    var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
5822
                                 NULL);
5823
                }
5824
            }
5825
            break;
5826
 
5827
          case MO_VAL_SET:
5828
            {
5829
              rtx loc = mo->u.loc;
5830
              rtx val, vloc, uloc, reverse = NULL_RTX;
5831
 
5832
              vloc = loc;
5833
              if (VAL_EXPR_HAS_REVERSE (loc))
5834
                {
5835
                  reverse = XEXP (loc, 1);
5836
                  vloc = XEXP (loc, 0);
5837
                }
5838
              uloc = XEXP (vloc, 1);
5839
              val = XEXP (vloc, 0);
5840
              vloc = uloc;
5841
 
5842
              if (GET_CODE (val) == CONCAT)
5843
                {
5844
                  vloc = XEXP (val, 1);
5845
                  val = XEXP (val, 0);
5846
                }
5847
 
5848
              if (GET_CODE (vloc) == SET)
5849
                {
5850
                  rtx vsrc = SET_SRC (vloc);
5851
 
5852
                  gcc_assert (val != vsrc);
5853
                  gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
5854
 
5855
                  vloc = SET_DEST (vloc);
5856
 
5857
                  if (VAL_NEEDS_RESOLUTION (loc))
5858
                    val_resolve (out, val, vsrc, insn);
5859
                }
5860
              else if (VAL_NEEDS_RESOLUTION (loc))
5861
                {
5862
                  gcc_assert (GET_CODE (uloc) == SET
5863
                              && GET_CODE (SET_SRC (uloc)) == REG);
5864
                  val_resolve (out, val, SET_SRC (uloc), insn);
5865
                }
5866
 
5867
              if (VAL_HOLDS_TRACK_EXPR (loc))
5868
                {
5869
                  if (VAL_EXPR_IS_CLOBBERED (loc))
5870
                    {
5871
                      if (REG_P (uloc))
5872
                        var_reg_delete (out, uloc, true);
5873
                      else if (MEM_P (uloc))
5874
                        var_mem_delete (out, uloc, true);
5875
                    }
5876
                  else
5877
                    {
5878
                      bool copied_p = VAL_EXPR_IS_COPIED (loc);
5879
                      rtx set_src = NULL;
5880
                      enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
5881
 
5882
                      if (GET_CODE (uloc) == SET)
5883
                        {
5884
                          set_src = SET_SRC (uloc);
5885
                          uloc = SET_DEST (uloc);
5886
                        }
5887
 
5888
                      if (copied_p)
5889
                        {
5890
                          if (flag_var_tracking_uninit)
5891
                            {
5892
                              status = find_src_status (in, set_src);
5893
 
5894
                              if (status == VAR_INIT_STATUS_UNKNOWN)
5895
                                status = find_src_status (out, set_src);
5896
                            }
5897
 
5898
                          set_src = find_src_set_src (in, set_src);
5899
                        }
5900
 
5901
                      if (REG_P (uloc))
5902
                        var_reg_delete_and_set (out, uloc, !copied_p,
5903
                                                status, set_src);
5904
                      else if (MEM_P (uloc))
5905
                        var_mem_delete_and_set (out, uloc, !copied_p,
5906
                                                status, set_src);
5907
                    }
5908
                }
5909
              else if (REG_P (uloc))
5910
                var_regno_delete (out, REGNO (uloc));
5911
 
5912
              val_store (out, val, vloc, insn, true);
5913
 
5914
              if (reverse)
5915
                val_store (out, XEXP (reverse, 0), XEXP (reverse, 1),
5916
                           insn, false);
5917
            }
5918
            break;
5919
 
5920
          case MO_SET:
5921
            {
5922
              rtx loc = mo->u.loc;
5923
              rtx set_src = NULL;
5924
 
5925
              if (GET_CODE (loc) == SET)
5926
                {
5927
                  set_src = SET_SRC (loc);
5928
                  loc = SET_DEST (loc);
5929
                }
5930
 
5931
              if (REG_P (loc))
5932
                var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5933
                                        set_src);
5934
              else if (MEM_P (loc))
5935
                var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
5936
                                        set_src);
5937
            }
5938
            break;
5939
 
5940
          case MO_COPY:
5941
            {
5942
              rtx loc = mo->u.loc;
5943
              enum var_init_status src_status;
5944
              rtx set_src = NULL;
5945
 
5946
              if (GET_CODE (loc) == SET)
5947
                {
5948
                  set_src = SET_SRC (loc);
5949
                  loc = SET_DEST (loc);
5950
                }
5951
 
5952
              if (! flag_var_tracking_uninit)
5953
                src_status = VAR_INIT_STATUS_INITIALIZED;
5954
              else
5955
                {
5956
                  src_status = find_src_status (in, set_src);
5957
 
5958
                  if (src_status == VAR_INIT_STATUS_UNKNOWN)
5959
                    src_status = find_src_status (out, set_src);
5960
                }
5961
 
5962
              set_src = find_src_set_src (in, set_src);
5963
 
5964
              if (REG_P (loc))
5965
                var_reg_delete_and_set (out, loc, false, src_status, set_src);
5966
              else if (MEM_P (loc))
5967
                var_mem_delete_and_set (out, loc, false, src_status, set_src);
5968
            }
5969
            break;
5970
 
5971
          case MO_USE_NO_VAR:
5972
            {
5973
              rtx loc = mo->u.loc;
5974
 
5975
              if (REG_P (loc))
5976
                var_reg_delete (out, loc, false);
5977
              else if (MEM_P (loc))
5978
                var_mem_delete (out, loc, false);
5979
            }
5980
            break;
5981
 
5982
          case MO_CLOBBER:
5983
            {
5984
              rtx loc = mo->u.loc;
5985
 
5986
              if (REG_P (loc))
5987
                var_reg_delete (out, loc, true);
5988
              else if (MEM_P (loc))
5989
                var_mem_delete (out, loc, true);
5990
            }
5991
            break;
5992
 
5993
          case MO_ADJUST:
5994
            out->stack_adjust += mo->u.adjust;
5995
            break;
5996
        }
5997
    }
5998
 
5999
  if (MAY_HAVE_DEBUG_INSNS)
6000
    {
6001
      dataflow_set_equiv_regs (out);
6002
      htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark,
6003
                     out);
6004
      htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star,
6005
                     out);
6006
#if ENABLE_CHECKING
6007
      htab_traverse (shared_hash_htab (out->vars),
6008
                     canonicalize_loc_order_check, out);
6009
#endif
6010
    }
6011
  changed = dataflow_set_different (&old_out, out);
6012
  dataflow_set_destroy (&old_out);
6013
  return changed;
6014
}
6015
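
/* A minimal standalone sketch (illustrative only, hypothetical toy_* names)
   of the transfer function above: OUT starts as a copy of IN and each
   micro operation updates it in order.  Here the tracked state is just a
   bitmask of registers known to hold a variable.  */

enum toy_mo_type { TOY_MO_SET, TOY_MO_CLOBBER, TOY_MO_CALL };
struct toy_uop { enum toy_mo_type type; unsigned regno; };

static unsigned
toy_transfer (unsigned in, const struct toy_uop *ops, int n)
{
  unsigned out = in;
  int i;

  for (i = 0; i < n; i++)
    switch (ops[i].type)
      {
      case TOY_MO_SET:     out |= 1u << ops[i].regno; break;
      case TOY_MO_CLOBBER: out &= ~(1u << ops[i].regno); break;
      case TOY_MO_CALL:    out = 0; break; /* calls clobber everything here */
      }
  return out;
}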
 
6016
/* Find the locations of variables in the whole function.  */
6017
 
6018
static bool
6019
vt_find_locations (void)
6020
{
6021
  fibheap_t worklist, pending, fibheap_swap;
6022
  sbitmap visited, in_worklist, in_pending, sbitmap_swap;
6023
  basic_block bb;
6024
  edge e;
6025
  int *bb_order;
6026
  int *rc_order;
6027
  int i;
6028
  int htabsz = 0;
6029
  int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
6030
  bool success = true;
6031
 
6032
  /* Compute reverse completion order of depth first search of the CFG
6033
     so that the data-flow runs faster.  */
6034
  rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
6035
  bb_order = XNEWVEC (int, last_basic_block);
6036
  pre_and_rev_post_order_compute (NULL, rc_order, false);
6037
  for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
6038
    bb_order[rc_order[i]] = i;
6039
  free (rc_order);
6040
 
6041
  worklist = fibheap_new ();
6042
  pending = fibheap_new ();
6043
  visited = sbitmap_alloc (last_basic_block);
6044
  in_worklist = sbitmap_alloc (last_basic_block);
6045
  in_pending = sbitmap_alloc (last_basic_block);
6046
  sbitmap_zero (in_worklist);
6047
 
6048
  FOR_EACH_BB (bb)
6049
    fibheap_insert (pending, bb_order[bb->index], bb);
6050
  sbitmap_ones (in_pending);
6051
 
6052
  while (success && !fibheap_empty (pending))
6053
    {
6054
      fibheap_swap = pending;
6055
      pending = worklist;
6056
      worklist = fibheap_swap;
6057
      sbitmap_swap = in_pending;
6058
      in_pending = in_worklist;
6059
      in_worklist = sbitmap_swap;
6060
 
6061
      sbitmap_zero (visited);
6062
 
6063
      while (!fibheap_empty (worklist))
6064
        {
6065
          bb = (basic_block) fibheap_extract_min (worklist);
6066
          RESET_BIT (in_worklist, bb->index);
6067
          if (!TEST_BIT (visited, bb->index))
6068
            {
6069
              bool changed;
6070
              edge_iterator ei;
6071
              int oldinsz, oldoutsz;
6072
 
6073
              SET_BIT (visited, bb->index);
6074
 
6075
              if (VTI (bb)->in.vars)
6076
                {
6077
                  htabsz
6078
                    -= (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6079
                        + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6080
                  oldinsz
6081
                    = htab_elements (shared_hash_htab (VTI (bb)->in.vars));
6082
                  oldoutsz
6083
                    = htab_elements (shared_hash_htab (VTI (bb)->out.vars));
6084
                }
6085
              else
6086
                oldinsz = oldoutsz = 0;
6087
 
6088
              if (MAY_HAVE_DEBUG_INSNS)
6089
                {
6090
                  dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
6091
                  bool first = true, adjust = false;
6092
 
6093
                  /* Calculate the IN set as the intersection of
6094
                     predecessor OUT sets.  */
6095
 
6096
                  dataflow_set_clear (in);
6097
                  dst_can_be_shared = true;
6098
 
6099
                  FOR_EACH_EDGE (e, ei, bb->preds)
6100
                    if (!VTI (e->src)->flooded)
6101
                      gcc_assert (bb_order[bb->index]
6102
                                  <= bb_order[e->src->index]);
6103
                    else if (first)
6104
                      {
6105
                        dataflow_set_copy (in, &VTI (e->src)->out);
6106
                        first_out = &VTI (e->src)->out;
6107
                        first = false;
6108
                      }
6109
                    else
6110
                      {
6111
                        dataflow_set_merge (in, &VTI (e->src)->out);
6112
                        adjust = true;
6113
                      }
6114
 
6115
                  if (adjust)
6116
                    {
6117
                      dataflow_post_merge_adjust (in, &VTI (bb)->permp);
6118
#if ENABLE_CHECKING
6119
                      /* Merge and merge_adjust should keep entries in
6120
                         canonical order.  */
6121
                      htab_traverse (shared_hash_htab (in->vars),
6122
                                     canonicalize_loc_order_check,
6123
                                     in);
6124
#endif
6125
                      if (dst_can_be_shared)
6126
                        {
6127
                          shared_hash_destroy (in->vars);
6128
                          in->vars = shared_hash_copy (first_out->vars);
6129
                        }
6130
                    }
6131
 
6132
                  VTI (bb)->flooded = true;
6133
                }
6134
              else
6135
                {
6136
                  /* Calculate the IN set as union of predecessor OUT sets.  */
6137
                  dataflow_set_clear (&VTI (bb)->in);
6138
                  FOR_EACH_EDGE (e, ei, bb->preds)
6139
                    dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
6140
                }
6141
 
6142
              changed = compute_bb_dataflow (bb);
6143
              htabsz += (htab_size (shared_hash_htab (VTI (bb)->in.vars))
6144
                         + htab_size (shared_hash_htab (VTI (bb)->out.vars)));
6145
 
6146
              if (htabmax && htabsz > htabmax)
6147
                {
6148
                  if (MAY_HAVE_DEBUG_INSNS)
6149
                    inform (DECL_SOURCE_LOCATION (cfun->decl),
6150
                            "variable tracking size limit exceeded with "
6151
                            "-fvar-tracking-assignments, retrying without");
6152
                  else
6153
                    inform (DECL_SOURCE_LOCATION (cfun->decl),
6154
                            "variable tracking size limit exceeded");
6155
                  success = false;
6156
                  break;
6157
                }
6158
 
6159
              if (changed)
6160
                {
6161
                  FOR_EACH_EDGE (e, ei, bb->succs)
6162
                    {
6163
                      if (e->dest == EXIT_BLOCK_PTR)
6164
                        continue;
6165
 
6166
                      if (TEST_BIT (visited, e->dest->index))
6167
                        {
6168
                          if (!TEST_BIT (in_pending, e->dest->index))
6169
                            {
6170
                              /* Send E->DEST to next round.  */
6171
                              SET_BIT (in_pending, e->dest->index);
6172
                              fibheap_insert (pending,
6173
                                              bb_order[e->dest->index],
6174
                                              e->dest);
6175
                            }
6176
                        }
6177
                      else if (!TEST_BIT (in_worklist, e->dest->index))
6178
                        {
6179
                          /* Add E->DEST to current round.  */
6180
                          SET_BIT (in_worklist, e->dest->index);
6181
                          fibheap_insert (worklist, bb_order[e->dest->index],
6182
                                          e->dest);
6183
                        }
6184
                    }
6185
                }
6186
 
6187
              if (dump_file)
6188
                fprintf (dump_file,
6189
                         "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
6190
                         bb->index,
6191
                         (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)),
6192
                         oldinsz,
6193
                         (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)),
6194
                         oldoutsz,
6195
                         (int)worklist->nodes, (int)pending->nodes, htabsz);
6196
 
6197
              if (dump_file && (dump_flags & TDF_DETAILS))
6198
                {
6199
                  fprintf (dump_file, "BB %i IN:\n", bb->index);
6200
                  dump_dataflow_set (&VTI (bb)->in);
6201
                  fprintf (dump_file, "BB %i OUT:\n", bb->index);
6202
                  dump_dataflow_set (&VTI (bb)->out);
6203
                }
6204
            }
6205
        }
6206
    }
6207
 
6208
  if (success && MAY_HAVE_DEBUG_INSNS)
6209
    FOR_EACH_BB (bb)
6210
      gcc_assert (VTI (bb)->flooded);
6211
 
6212
  free (bb_order);
6213
  fibheap_delete (worklist);
6214
  fibheap_delete (pending);
6215
  sbitmap_free (visited);
6216
  sbitmap_free (in_worklist);
6217
  sbitmap_free (in_pending);
6218
 
6219
  return success;
6220
}
6221
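
/* A much simplified, standalone sketch (illustrative only, hypothetical
   toy_* names) of the fixed-point iteration above.  The real code keeps
   two priority heaps (current round and next round) ordered by reverse
   completion order; this toy version just sweeps all blocks repeatedly
   until nothing changes, propagating OUT sets to successor IN sets by
   union, as in the non-debug path.  */

#define TOY_NBB 8

static void
toy_iterate (unsigned in[TOY_NBB], unsigned out[TOY_NBB],
             const unsigned char succ[TOY_NBB][TOY_NBB],
             unsigned (*transfer) (int bb, unsigned in_set))
{
  int changed = 1;

  while (changed)
    {
      int bb, s;

      changed = 0;
      for (bb = 0; bb < TOY_NBB; bb++)
        {
          unsigned new_out = transfer (bb, in[bb]);

          if (new_out != out[bb])
            {
              out[bb] = new_out;
              changed = 1;
              for (s = 0; s < TOY_NBB; s++)
                if (succ[bb][s])
                  in[s] |= new_out;
            }
        }
    }
}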
 
6222
/* Print the content of the LIST to dump file.  */
6223
 
6224
static void
6225
dump_attrs_list (attrs list)
6226
{
6227
  for (; list; list = list->next)
6228
    {
6229
      if (dv_is_decl_p (list->dv))
6230
        print_mem_expr (dump_file, dv_as_decl (list->dv));
6231
      else
6232
        print_rtl_single (dump_file, dv_as_value (list->dv));
6233
      fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
6234
    }
6235
  fprintf (dump_file, "\n");
6236
}
6237
 
6238
/* Print the information about variable *SLOT to dump file.  */
6239
 
6240
static int
6241
dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED)
6242
{
6243
  variable var = (variable) *slot;
6244
 
6245
  dump_var (var);
6246
 
6247
  /* Continue traversing the hash table.  */
6248
  return 1;
6249
}
6250
 
6251
/* Print the information about variable VAR to dump file.  */
6252
 
6253
static void
6254
dump_var (variable var)
6255
{
6256
  int i;
6257
  location_chain node;
6258
 
6259
  if (dv_is_decl_p (var->dv))
6260
    {
6261
      const_tree decl = dv_as_decl (var->dv);
6262
 
6263
      if (DECL_NAME (decl))
6264
        {
6265
          fprintf (dump_file, "  name: %s",
6266
                   IDENTIFIER_POINTER (DECL_NAME (decl)));
6267
          if (dump_flags & TDF_UID)
6268
            fprintf (dump_file, "D.%u", DECL_UID (decl));
6269
        }
6270
      else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
6271
        fprintf (dump_file, "  name: D#%u", DEBUG_TEMP_UID (decl));
6272
      else
6273
        fprintf (dump_file, "  name: D.%u", DECL_UID (decl));
6274
      fprintf (dump_file, "\n");
6275
    }
6276
  else
6277
    {
6278
      fputc (' ', dump_file);
6279
      print_rtl_single (dump_file, dv_as_value (var->dv));
6280
    }
6281
 
6282
  for (i = 0; i < var->n_var_parts; i++)
6283
    {
6284
      fprintf (dump_file, "    offset %ld\n",
6285
               (long) var->var_part[i].offset);
6286
      for (node = var->var_part[i].loc_chain; node; node = node->next)
6287
        {
6288
          fprintf (dump_file, "      ");
6289
          if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
6290
            fprintf (dump_file, "[uninit]");
6291
          print_rtl_single (dump_file, node->loc);
6292
        }
6293
    }
6294
}
6295
 
6296
/* Print the information about variables from hash table VARS to dump file.  */
6297
 
6298
static void
6299
dump_vars (htab_t vars)
6300
{
6301
  if (htab_elements (vars) > 0)
6302
    {
6303
      fprintf (dump_file, "Variables:\n");
6304
      htab_traverse (vars, dump_var_slot, NULL);
6305
    }
6306
}
6307
 
6308
/* Print the dataflow set SET to dump file.  */
6309
 
6310
static void
6311
dump_dataflow_set (dataflow_set *set)
6312
{
6313
  int i;
6314
 
6315
  fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
6316
           set->stack_adjust);
6317
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6318
    {
6319
      if (set->regs[i])
6320
        {
6321
          fprintf (dump_file, "Reg %d:", i);
6322
          dump_attrs_list (set->regs[i]);
6323
        }
6324
    }
6325
  dump_vars (shared_hash_htab (set->vars));
6326
  fprintf (dump_file, "\n");
6327
}
6328
 
6329
/* Print the IN and OUT sets for each basic block to dump file.  */
6330
 
6331
static void
6332
dump_dataflow_sets (void)
6333
{
6334
  basic_block bb;
6335
 
6336
  FOR_EACH_BB (bb)
6337
    {
6338
      fprintf (dump_file, "\nBasic block %d:\n", bb->index);
6339
      fprintf (dump_file, "IN:\n");
6340
      dump_dataflow_set (&VTI (bb)->in);
6341
      fprintf (dump_file, "OUT:\n");
6342
      dump_dataflow_set (&VTI (bb)->out);
6343
    }
6344
}
6345
 
6346
/* Add variable VAR to the hash table of changed variables and
6347
   if it has no locations, delete it from SET's hash table.  */
6348
 
6349
static void
6350
variable_was_changed (variable var, dataflow_set *set)
6351
{
6352
  hashval_t hash = dv_htab_hash (var->dv);
6353
 
6354
  if (emit_notes)
6355
    {
6356
      void **slot;
6357
      bool old_cur_loc_changed = false;
6358
 
6359
      /* Remember this decl or VALUE has been added to changed_variables.  */
6360
      set_dv_changed (var->dv, true);
6361
 
6362
      slot = htab_find_slot_with_hash (changed_variables,
6363
                                       var->dv,
6364
                                       hash, INSERT);
6365
 
6366
      if (*slot)
6367
        {
6368
          variable old_var = (variable) *slot;
6369
          gcc_assert (old_var->in_changed_variables);
6370
          old_var->in_changed_variables = false;
6371
          old_cur_loc_changed = old_var->cur_loc_changed;
6372
          variable_htab_free (*slot);
6373
        }
6374
      if (set && var->n_var_parts == 0)
6375
        {
6376
          variable empty_var;
6377
 
6378
          empty_var = (variable) pool_alloc (dv_pool (var->dv));
6379
          empty_var->dv = var->dv;
6380
          empty_var->refcount = 1;
6381
          empty_var->n_var_parts = 0;
6382
          empty_var->cur_loc_changed = true;
6383
          empty_var->in_changed_variables = true;
6384
          *slot = empty_var;
6385
          goto drop_var;
6386
        }
6387
      else
6388
        {
6389
          var->refcount++;
6390
          var->in_changed_variables = true;
6391
          /* If within processing one uop a variable is deleted
6392
             and then re-added, we need to assume it has changed.  */
6393
          if (old_cur_loc_changed)
6394
            var->cur_loc_changed = true;
6395
          *slot = var;
6396
        }
6397
    }
6398
  else
6399
    {
6400
      gcc_assert (set);
6401
      if (var->n_var_parts == 0)
6402
        {
6403
          void **slot;
6404
 
6405
        drop_var:
6406
          slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
6407
          if (slot)
6408
            {
6409
              if (shared_hash_shared (set->vars))
6410
                slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
6411
                                                      NO_INSERT);
6412
              htab_clear_slot (shared_hash_htab (set->vars), slot);
6413
            }
6414
        }
6415
    }
6416
}
6417
 
6418
/* Look for the index in VAR->var_part corresponding to OFFSET.
6419
   Return -1 if not found.  If INSERTION_POINT is non-NULL, the
6420
   referenced int will be set to the index that the part has, or would
6421
   have if it were inserted.  */
6422
 
6423
static inline int
6424
find_variable_location_part (variable var, HOST_WIDE_INT offset,
6425
                             int *insertion_point)
6426
{
6427
  int pos, low, high;
6428
 
6429
  /* Find the location part.  */
6430
  low = 0;
6431
  high = var->n_var_parts;
6432
  while (low != high)
6433
    {
6434
      pos = (low + high) / 2;
6435
      if (var->var_part[pos].offset < offset)
6436
        low = pos + 1;
6437
      else
6438
        high = pos;
6439
    }
6440
  pos = low;
6441
 
6442
  if (insertion_point)
6443
    *insertion_point = pos;
6444
 
6445
  if (pos < var->n_var_parts && var->var_part[pos].offset == offset)
6446
    return pos;
6447
 
6448
  return -1;
6449
}
6450
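
/* A minimal standalone sketch (illustrative only, hypothetical toy_* names)
   of the lookup above: a textbook lower-bound binary search over parts
   sorted by offset, also reporting where a missing offset would have to
   be inserted.  */

static int
toy_find_offset (const long *offsets, int n, long offset, int *insertion_point)
{
  int low = 0, high = n;

  while (low != high)
    {
      int pos = (low + high) / 2;

      if (offsets[pos] < offset)
        low = pos + 1;
      else
        high = pos;
    }

  if (insertion_point)
    *insertion_point = low;

  return (low < n && offsets[low] == offset) ? low : -1;
}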
 
6451
static void **
6452
set_slot_part (dataflow_set *set, rtx loc, void **slot,
6453
               decl_or_value dv, HOST_WIDE_INT offset,
6454
               enum var_init_status initialized, rtx set_src)
6455
{
6456
  int pos;
6457
  location_chain node, next;
6458
  location_chain *nextp;
6459
  variable var;
6460
  bool onepart = dv_onepart_p (dv);
6461
 
6462
  gcc_assert (offset == 0 || !onepart);
6463
  gcc_assert (loc != dv_as_opaque (dv));
6464
 
6465
  var = (variable) *slot;
6466
 
6467
  if (! flag_var_tracking_uninit)
6468
    initialized = VAR_INIT_STATUS_INITIALIZED;
6469
 
6470
  if (!var)
6471
    {
6472
      /* Create new variable information.  */
6473
      var = (variable) pool_alloc (dv_pool (dv));
6474
      var->dv = dv;
6475
      var->refcount = 1;
6476
      var->n_var_parts = 1;
6477
      var->cur_loc_changed = false;
6478
      var->in_changed_variables = false;
6479
      var->var_part[0].offset = offset;
6480
      var->var_part[0].loc_chain = NULL;
6481
      var->var_part[0].cur_loc = NULL;
6482
      *slot = var;
6483
      pos = 0;
6484
      nextp = &var->var_part[0].loc_chain;
6485
    }
6486
  else if (onepart)
6487
    {
6488
      int r = -1, c = 0;
6489
 
6490
      gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
6491
 
6492
      pos = 0;
6493
 
6494
      if (GET_CODE (loc) == VALUE)
6495
        {
6496
          for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6497
               nextp = &node->next)
6498
            if (GET_CODE (node->loc) == VALUE)
6499
              {
6500
                if (node->loc == loc)
6501
                  {
6502
                    r = 0;
6503
                    break;
6504
                  }
6505
                if (canon_value_cmp (node->loc, loc))
6506
                  c++;
6507
                else
6508
                  {
6509
                    r = 1;
6510
                    break;
6511
                  }
6512
              }
6513
            else if (REG_P (node->loc) || MEM_P (node->loc))
6514
              c++;
6515
            else
6516
              {
6517
                r = 1;
6518
                break;
6519
              }
6520
        }
6521
      else if (REG_P (loc))
6522
        {
6523
          for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6524
               nextp = &node->next)
6525
            if (REG_P (node->loc))
6526
              {
6527
                if (REGNO (node->loc) < REGNO (loc))
6528
                  c++;
6529
                else
6530
                  {
6531
                    if (REGNO (node->loc) == REGNO (loc))
6532
                      r = 0;
6533
                    else
6534
                      r = 1;
6535
                    break;
6536
                  }
6537
              }
6538
            else
6539
              {
6540
                r = 1;
6541
                break;
6542
              }
6543
        }
6544
      else if (MEM_P (loc))
6545
        {
6546
          for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6547
               nextp = &node->next)
6548
            if (REG_P (node->loc))
6549
              c++;
6550
            else if (MEM_P (node->loc))
6551
              {
6552
                if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
6553
                  break;
6554
                else
6555
                  c++;
6556
              }
6557
            else
6558
              {
6559
                r = 1;
6560
                break;
6561
              }
6562
        }
6563
      else
6564
        for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
6565
             nextp = &node->next)
6566
          if ((r = loc_cmp (node->loc, loc)) >= 0)
6567
            break;
6568
          else
6569
            c++;
6570
 
6571
      if (r == 0)
6572
        return slot;
6573
 
6574
      if (shared_var_p (var, set->vars))
6575
        {
6576
          slot = unshare_variable (set, slot, var, initialized);
6577
          var = (variable)*slot;
6578
          for (nextp = &var->var_part[0].loc_chain; c;
6579
               nextp = &(*nextp)->next)
6580
            c--;
6581
          gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
6582
        }
6583
    }
6584
  else
6585
    {
6586
      int inspos = 0;
6587
 
6588
      gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
6589
 
6590
      pos = find_variable_location_part (var, offset, &inspos);
6591
 
6592
      if (pos >= 0)
6593
        {
6594
          node = var->var_part[pos].loc_chain;
6595
 
6596
          if (node
6597
              && ((REG_P (node->loc) && REG_P (loc)
6598
                   && REGNO (node->loc) == REGNO (loc))
6599
                  || rtx_equal_p (node->loc, loc)))
6600
            {
6601
              /* LOC is at the beginning of the chain, so there is nothing
6602
                 to do.  */
6603
              if (node->init < initialized)
6604
                node->init = initialized;
6605
              if (set_src != NULL)
6606
                node->set_src = set_src;
6607
 
6608
              return slot;
6609
            }
6610
          else
6611
            {
6612
              /* We have to make a copy of a shared variable.  */
6613
              if (shared_var_p (var, set->vars))
6614
                {
6615
                  slot = unshare_variable (set, slot, var, initialized);
6616
                  var = (variable)*slot;
6617
                }
6618
            }
6619
        }
6620
      else
6621
        {
6622
          /* The location part was not found, so a new one will be created.  */
6623
 
6624
          /* We have to make a copy of the shared variable.  */
6625
          if (shared_var_p (var, set->vars))
6626
            {
6627
              slot = unshare_variable (set, slot, var, initialized);
6628
              var = (variable)*slot;
6629
            }
6630
 
6631
          /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
6632
             thus there are at most MAX_VAR_PARTS different offsets.  */
6633
          gcc_assert (var->n_var_parts < MAX_VAR_PARTS
6634
                      && (!var->n_var_parts || !dv_onepart_p (var->dv)));
6635
 
6636
          /* We have to move the elements of the array starting at index
6637
             inspos to the next position.  */
6638
          for (pos = var->n_var_parts; pos > inspos; pos--)
6639
            var->var_part[pos] = var->var_part[pos - 1];
6640
 
6641
          var->n_var_parts++;
6642
          var->var_part[pos].offset = offset;
6643
          var->var_part[pos].loc_chain = NULL;
6644
          var->var_part[pos].cur_loc = NULL;
6645
        }
6646
 
6647
      /* Delete the location from the list.  */
6648
      nextp = &var->var_part[pos].loc_chain;
6649
      for (node = var->var_part[pos].loc_chain; node; node = next)
6650
        {
6651
          next = node->next;
6652
          if ((REG_P (node->loc) && REG_P (loc)
6653
               && REGNO (node->loc) == REGNO (loc))
6654
              || rtx_equal_p (node->loc, loc))
6655
            {
6656
              /* Save these values, to assign to the new node, before
6657
                 deleting this one.  */
6658
              if (node->init > initialized)
6659
                initialized = node->init;
6660
              if (node->set_src != NULL && set_src == NULL)
6661
                set_src = node->set_src;
6662
              if (var->var_part[pos].cur_loc == node->loc)
6663
                {
6664
                  var->var_part[pos].cur_loc = NULL;
6665
                  var->cur_loc_changed = true;
6666
                }
6667
              pool_free (loc_chain_pool, node);
6668
              *nextp = next;
6669
              break;
6670
            }
6671
          else
6672
            nextp = &node->next;
6673
        }
6674
 
6675
      nextp = &var->var_part[pos].loc_chain;
6676
    }
6677
 
6678
  /* Add the location to the beginning.  */
6679
  node = (location_chain) pool_alloc (loc_chain_pool);
6680
  node->loc = loc;
6681
  node->init = initialized;
6682
  node->set_src = set_src;
6683
  node->next = *nextp;
6684
  *nextp = node;
6685
 
6686
  if (onepart && emit_notes)
6687
    add_value_chains (var->dv, loc);
6688
 
6689
  /* If no location has been emitted yet, do so now.  */
6690
  if (var->var_part[pos].cur_loc == NULL)
6691
    variable_was_changed (var, set);
6692
 
6693
  return slot;
6694
}
6695
 
6696
/* Set the part of variable's location in the dataflow set SET.  The
6697
   variable part is specified by variable's declaration in DV and
6698
   offset OFFSET and the part's location by LOC.  IOPT should be
6699
   NO_INSERT if the variable is known to be in SET already and the
6700
   variable hash table must not be resized, and INSERT otherwise.  */
6701
 
6702
static void
6703
set_variable_part (dataflow_set *set, rtx loc,
6704
                   decl_or_value dv, HOST_WIDE_INT offset,
6705
                   enum var_init_status initialized, rtx set_src,
6706
                   enum insert_option iopt)
6707
{
6708
  void **slot;
6709
 
6710
  if (iopt == NO_INSERT)
6711
    slot = shared_hash_find_slot_noinsert (set->vars, dv);
6712
  else
6713
    {
6714
      slot = shared_hash_find_slot (set->vars, dv);
6715
      if (!slot)
6716
        slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
6717
    }
6718
  slot = set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
6719
}
6720
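 
/* For illustration only: a minimal sketch of how a caller might use
   set_variable_part to record that a decl currently lives in a
   register.  The helper name and the DECL/REG parameters are
   hypothetical; the pass itself does not contain this function.  */

static void
example_record_reg_location (dataflow_set *set, tree decl, rtx reg)
{
  /* INSERT because DECL may not be present in SET->vars yet.  */
  set_variable_part (set, reg, dv_from_decl (decl), 0,
                     VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
}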
 
6721
/* Remove all recorded register locations for the given variable part
6722
   from dataflow set SET, except for those that are identical to LOC.
6723
   The variable part is specified by its SET->vars slot SLOT and
6724
   offset OFFSET.  */
6725
 
6726
static void **
6727
clobber_slot_part (dataflow_set *set, rtx loc, void **slot,
6728
                   HOST_WIDE_INT offset, rtx set_src)
6729
{
6730
  variable var = (variable) *slot;
6731
  int pos = find_variable_location_part (var, offset, NULL);
6732
 
6733
  if (pos >= 0)
6734
    {
6735
      location_chain node, next;
6736
 
6737
      /* Remove the register locations from the dataflow set.  */
6738
      next = var->var_part[pos].loc_chain;
6739
      for (node = next; node; node = next)
6740
        {
6741
          next = node->next;
6742
          if (node->loc != loc
6743
              && (!flag_var_tracking_uninit
6744
                  || !set_src
6745
                  || MEM_P (set_src)
6746
                  || !rtx_equal_p (set_src, node->set_src)))
6747
            {
6748
              if (REG_P (node->loc))
6749
                {
6750
                  attrs anode, anext;
6751
                  attrs *anextp;
6752
 
6753
                  /* Remove the variable part from the register's
6754
                     list, but preserve any other variable parts
6755
                     that might be regarded as live in that same
6756
                     register.  */
6757
                  anextp = &set->regs[REGNO (node->loc)];
6758
                  for (anode = *anextp; anode; anode = anext)
6759
                    {
6760
                      anext = anode->next;
6761
                      if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
6762
                          && anode->offset == offset)
6763
                        {
6764
                          pool_free (attrs_pool, anode);
6765
                          *anextp = anext;
6766
                        }
6767
                      else
6768
                        anextp = &anode->next;
6769
                    }
6770
                }
6771
 
6772
              slot = delete_slot_part (set, node->loc, slot, offset);
6773
            }
6774
        }
6775
    }
6776
 
6777
  return slot;
6778
}
6779
 
6780
/* Remove all recorded register locations for the given variable part
6781
   from dataflow set SET, except for those that are identical to LOC.
6782
   The variable part is specified by the variable's declaration or
6783
   value DV and offset OFFSET.  */
6784
 
6785
static void
6786
clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6787
                       HOST_WIDE_INT offset, rtx set_src)
6788
{
6789
  void **slot;
6790
 
6791
  if (!dv_as_opaque (dv)
6792
      || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
6793
    return;
6794
 
6795
  slot = shared_hash_find_slot_noinsert (set->vars, dv);
6796
  if (!slot)
6797
    return;
6798
 
6799
  slot = clobber_slot_part (set, loc, slot, offset, set_src);
6800
}
6801
 
6802
/* Delete the part of variable's location from dataflow set SET.  The
6803
   variable part is specified by its SET->vars slot SLOT and offset
6804
   OFFSET and the part's location by LOC.  */
6805
 
6806
static void **
6807
delete_slot_part (dataflow_set *set, rtx loc, void **slot,
6808
                  HOST_WIDE_INT offset)
6809
{
6810
  variable var = (variable) *slot;
6811
  int pos = find_variable_location_part (var, offset, NULL);
6812
 
6813
  if (pos >= 0)
6814
    {
6815
      location_chain node, next;
6816
      location_chain *nextp;
6817
      bool changed;
6818
 
6819
      if (shared_var_p (var, set->vars))
6820
        {
6821
          /* If the variable contains the location part we have to
6822
             make a copy of the variable.  */
6823
          for (node = var->var_part[pos].loc_chain; node;
6824
               node = node->next)
6825
            {
6826
              if ((REG_P (node->loc) && REG_P (loc)
6827
                   && REGNO (node->loc) == REGNO (loc))
6828
                  || rtx_equal_p (node->loc, loc))
6829
                {
6830
                  slot = unshare_variable (set, slot, var,
6831
                                           VAR_INIT_STATUS_UNKNOWN);
6832
                  var = (variable)*slot;
6833
                  break;
6834
                }
6835
            }
6836
        }
6837
 
6838
      /* Delete the location part.  */
6839
      changed = false;
6840
      nextp = &var->var_part[pos].loc_chain;
6841
      for (node = *nextp; node; node = next)
6842
        {
6843
          next = node->next;
6844
          if ((REG_P (node->loc) && REG_P (loc)
6845
               && REGNO (node->loc) == REGNO (loc))
6846
              || rtx_equal_p (node->loc, loc))
6847
            {
6848
              if (emit_notes && pos == 0 && dv_onepart_p (var->dv))
6849
                remove_value_chains (var->dv, node->loc);
6850
              /* If we have deleted the location which was last emitted,
6851
                 we have to emit a new location, so add the variable to the
6852
                 set of changed variables.  */
6853
              if (var->var_part[pos].cur_loc == node->loc)
6854
                {
6855
                  changed = true;
6856
                  var->var_part[pos].cur_loc = NULL;
6857
                  var->cur_loc_changed = true;
6858
                }
6859
              pool_free (loc_chain_pool, node);
6860
              *nextp = next;
6861
              break;
6862
            }
6863
          else
6864
            nextp = &node->next;
6865
        }
6866
 
6867
      if (var->var_part[pos].loc_chain == NULL)
6868
        {
6869
          changed = true;
6870
          var->n_var_parts--;
6871
          if (emit_notes)
6872
            var->cur_loc_changed = true;
6873
          while (pos < var->n_var_parts)
6874
            {
6875
              var->var_part[pos] = var->var_part[pos + 1];
6876
              pos++;
6877
            }
6878
        }
6879
      if (changed)
6880
        variable_was_changed (var, set);
6881
    }
6882
 
6883
  return slot;
6884
}
6885
 
6886
/* Delete the part of variable's location from dataflow set SET.  The
6887
   variable part is specified by variable's declaration or value DV
6888
   and offset OFFSET and the part's location by LOC.  */
6889
 
6890
static void
6891
delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
6892
                      HOST_WIDE_INT offset)
6893
{
6894
  void **slot = shared_hash_find_slot_noinsert (set->vars, dv);
6895
  if (!slot)
6896
    return;
6897
 
6898
  slot = delete_slot_part (set, loc, slot, offset);
6899
}
6900
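 
/* For illustration only: a hypothetical counterpart to the sketch
   after set_variable_part above, dropping the recorded register
   location again.  If DV is not in SET->vars, the noinsert slot
   lookup fails and the request is simply ignored.  */

static void
example_forget_reg_location (dataflow_set *set, tree decl, rtx reg)
{
  delete_variable_part (set, reg, dv_from_decl (decl), 0);
}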
 
6901
/* Structure for passing additional parameters to the function
6903
   vt_expand_loc_callback.  */
6903
struct expand_loc_callback_data
6904
{
6905
  /* The variables and values active at this point.  */
6906
  htab_t vars;
6907
 
6908
  /* True in vt_expand_loc_dummy calls, where no rtl should be
6909
     allocated.  Non-NULL should be returned if vt_expand_loc would
6910
     return non-NULL in that case, and NULL otherwise.  cur_loc_changed
6911
     should be computed and cur_loc recomputed when possible (but just
6912
     once per emit_notes_for_changes call).  */
6913
  bool dummy;
6914
 
6915
  /* True if expansion of subexpressions had to recompute some
6916
     VALUE/DEBUG_EXPR_DECL's cur_loc or used a VALUE/DEBUG_EXPR_DECL
6917
     whose cur_loc has already been recomputed during the current
6918
     emit_notes_for_changes call.  */
6919
  bool cur_loc_changed;
6920
};
6921
 
6922
/* Callback for cselib_expand_value that looks for expressions
6923
   holding the value in the var-tracking hash tables.  Return X for
6924
   standard processing; anything else is to be used as-is.  */
6925
 
6926
static rtx
6927
vt_expand_loc_callback (rtx x, bitmap regs, int max_depth, void *data)
6928
{
6929
  struct expand_loc_callback_data *elcd
6930
    = (struct expand_loc_callback_data *) data;
6931
  bool dummy = elcd->dummy;
6932
  bool cur_loc_changed = elcd->cur_loc_changed;
6933
  decl_or_value dv;
6934
  variable var;
6935
  location_chain loc;
6936
  rtx result, subreg, xret;
6937
 
6938
  switch (GET_CODE (x))
6939
    {
6940
    case SUBREG:
6941
      if (dummy)
6942
        {
6943
          if (cselib_dummy_expand_value_rtx_cb (SUBREG_REG (x), regs,
6944
                                                max_depth - 1,
6945
                                                vt_expand_loc_callback, data))
6946
            return pc_rtx;
6947
          else
6948
            return NULL;
6949
        }
6950
 
6951
      subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
6952
                                           max_depth - 1,
6953
                                           vt_expand_loc_callback, data);
6954
 
6955
      if (!subreg)
6956
        return NULL;
6957
 
6958
      result = simplify_gen_subreg (GET_MODE (x), subreg,
6959
                                    GET_MODE (SUBREG_REG (x)),
6960
                                    SUBREG_BYTE (x));
6961
 
6962
      /* Invalid SUBREGs are ok in debug info.  ??? We could try
6963
         alternate expansions for the VALUE as well.  */
6964
      if (!result)
6965
        result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
6966
 
6967
      return result;
6968
 
6969
    case DEBUG_EXPR:
6970
      dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
6971
      xret = NULL;
6972
      break;
6973
 
6974
    case VALUE:
6975
      dv = dv_from_value (x);
6976
      xret = x;
6977
      break;
6978
 
6979
    default:
6980
      return x;
6981
    }
6982
 
6983
  if (VALUE_RECURSED_INTO (x))
6984
    return NULL;
6985
 
6986
  var = (variable) htab_find_with_hash (elcd->vars, dv, dv_htab_hash (dv));
6987
 
6988
  if (!var)
6989
    {
6990
      if (dummy && dv_changed_p (dv))
6991
        elcd->cur_loc_changed = true;
6992
      return xret;
6993
    }
6994
 
6995
  if (var->n_var_parts == 0)
6996
    {
6997
      if (dummy)
6998
        elcd->cur_loc_changed = true;
6999
      return xret;
7000
    }
7001
 
7002
  gcc_assert (var->n_var_parts == 1);
7003
 
7004
  VALUE_RECURSED_INTO (x) = true;
7005
  result = NULL;
7006
 
7007
  if (var->var_part[0].cur_loc)
7008
    {
7009
      if (dummy)
7010
        {
7011
          if (cselib_dummy_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
7012
                                                max_depth,
7013
                                                vt_expand_loc_callback, data))
7014
            result = pc_rtx;
7015
        }
7016
      else
7017
        result = cselib_expand_value_rtx_cb (var->var_part[0].cur_loc, regs,
7018
                                             max_depth,
7019
                                             vt_expand_loc_callback, data);
7020
      if (result)
7021
        set_dv_changed (dv, false);
7022
    }
7023
  if (!result && dv_changed_p (dv))
7024
    {
7025
      set_dv_changed (dv, false);
7026
      for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
7027
        if (loc->loc == var->var_part[0].cur_loc)
7028
          continue;
7029
        else if (dummy)
7030
          {
7031
            elcd->cur_loc_changed = cur_loc_changed;
7032
            if (cselib_dummy_expand_value_rtx_cb (loc->loc, regs, max_depth,
7033
                                                  vt_expand_loc_callback,
7034
                                                  data))
7035
              {
7036
                result = pc_rtx;
7037
                break;
7038
              }
7039
          }
7040
        else
7041
          {
7042
            result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth,
7043
                                                 vt_expand_loc_callback, data);
7044
            if (result)
7045
              break;
7046
          }
7047
      if (dummy && (result || var->var_part[0].cur_loc))
7048
        var->cur_loc_changed = true;
7049
      var->var_part[0].cur_loc = loc ? loc->loc : NULL_RTX;
7050
    }
7051
  if (dummy)
7052
    {
7053
      if (var->cur_loc_changed)
7054
        elcd->cur_loc_changed = true;
7055
      else if (!result && var->var_part[0].cur_loc == NULL_RTX)
7056
        elcd->cur_loc_changed = cur_loc_changed;
7057
    }
7058
 
7059
  VALUE_RECURSED_INTO (x) = false;
7060
  if (result)
7061
    return result;
7062
  else
7063
    return xret;
7064
}
7065
 
7066
/* Expand VALUEs in LOC, using VARS as well as cselib's equivalence
7067
   tables.  */
7068
 
7069
static rtx
7070
vt_expand_loc (rtx loc, htab_t vars)
7071
{
7072
  struct expand_loc_callback_data data;
7073
 
7074
  if (!MAY_HAVE_DEBUG_INSNS)
7075
    return loc;
7076
 
7077
  data.vars = vars;
7078
  data.dummy = false;
7079
  data.cur_loc_changed = false;
7080
  loc = cselib_expand_value_rtx_cb (loc, scratch_regs, 5,
7081
                                    vt_expand_loc_callback, &data);
7082
 
7083
  if (loc && MEM_P (loc))
7084
    loc = targetm.delegitimize_address (loc);
7085
  return loc;
7086
}
7087
 
7088
/* Like vt_expand_loc, but only return true/false (whether vt_expand_loc
7089
   would succeed or not), without actually allocating new rtxes.  */
7090
 
7091
static bool
7092
vt_expand_loc_dummy (rtx loc, htab_t vars, bool *pcur_loc_changed)
7093
{
7094
  struct expand_loc_callback_data data;
7095
  bool ret;
7096
 
7097
  gcc_assert (MAY_HAVE_DEBUG_INSNS);
7098
  data.vars = vars;
7099
  data.dummy = true;
7100
  data.cur_loc_changed = false;
7101
  ret = cselib_dummy_expand_value_rtx_cb (loc, scratch_regs, 5,
7102
                                          vt_expand_loc_callback, &data);
7103
  *pcur_loc_changed = data.cur_loc_changed;
7104
  return ret;
7105
}
7106
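 
/* For illustration only: a hypothetical helper showing how the two
   entry points above can be paired.  The dummy variant asks whether
   LOC would expand at all, without allocating rtl, and the real
   expansion is done only when that check succeeds.  The pass itself
   does not use this helper.  */

static rtx
example_expand_if_possible (rtx loc, htab_t vars)
{
  bool cur_loc_changed;

  if (MAY_HAVE_DEBUG_INSNS
      && !vt_expand_loc_dummy (loc, vars, &cur_loc_changed))
    return NULL_RTX;

  /* Without debug insns, vt_expand_loc simply returns LOC unchanged.  */
  return vt_expand_loc (loc, vars);
}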
 
7107
#ifdef ENABLE_RTL_CHECKING
7108
/* Used to verify that cur_loc_changed updating is safe.  */
7109
static struct pointer_map_t *emitted_notes;
7110
#endif
7111
 
7112
/* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP.  DATA contains
7113
   additional parameters: WHERE specifies whether the note shall be emitted
7114
   before or after instruction INSN.  */
7115
 
7116
static int
7117
emit_note_insn_var_location (void **varp, void *data)
7118
{
7119
  variable var = (variable) *varp;
7120
  rtx insn = ((emit_note_data *)data)->insn;
7121
  enum emit_note_where where = ((emit_note_data *)data)->where;
7122
  htab_t vars = ((emit_note_data *)data)->vars;
7123
  rtx note, note_vl;
7124
  int i, j, n_var_parts;
7125
  bool complete;
7126
  enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
7127
  HOST_WIDE_INT last_limit;
7128
  tree type_size_unit;
7129
  HOST_WIDE_INT offsets[MAX_VAR_PARTS];
7130
  rtx loc[MAX_VAR_PARTS];
7131
  tree decl;
7132
  location_chain lc;
7133
 
7134
  if (dv_is_value_p (var->dv))
7135
    goto value_or_debug_decl;
7136
 
7137
  decl = dv_as_decl (var->dv);
7138
 
7139
  if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7140
    goto value_or_debug_decl;
7141
 
7142
  complete = true;
7143
  last_limit = 0;
7144
  n_var_parts = 0;
7145
  if (!MAY_HAVE_DEBUG_INSNS)
7146
    {
7147
      for (i = 0; i < var->n_var_parts; i++)
7148
        if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
7149
          {
7150
            var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
7151
            var->cur_loc_changed = true;
7152
          }
7153
      if (var->n_var_parts == 0)
7154
        var->cur_loc_changed = true;
7155
    }
7156
#ifndef ENABLE_RTL_CHECKING
7157
  if (!var->cur_loc_changed)
7158
    goto clear;
7159
#endif
7160
  for (i = 0; i < var->n_var_parts; i++)
7161
    {
7162
      enum machine_mode mode, wider_mode;
7163
      rtx loc2;
7164
 
7165
      if (last_limit < var->var_part[i].offset)
7166
        {
7167
          complete = false;
7168
          break;
7169
        }
7170
      else if (last_limit > var->var_part[i].offset)
7171
        continue;
7172
      offsets[n_var_parts] = var->var_part[i].offset;
7173
      if (!var->var_part[i].cur_loc)
7174
        {
7175
          complete = false;
7176
          continue;
7177
        }
7178
      loc2 = vt_expand_loc (var->var_part[i].cur_loc, vars);
7179
      if (!loc2)
7180
        {
7181
          complete = false;
7182
          continue;
7183
        }
7184
      loc[n_var_parts] = loc2;
7185
      mode = GET_MODE (var->var_part[i].cur_loc);
7186
      if (mode == VOIDmode && dv_onepart_p (var->dv))
7187
        mode = DECL_MODE (decl);
7188
      for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
7189
        if (var->var_part[i].cur_loc == lc->loc)
7190
          {
7191
            initialized = lc->init;
7192
            break;
7193
          }
7194
      gcc_assert (lc);
7195
      last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
7196
 
7197
      /* Attempt to merge adjacent registers or memory.  */
7198
      wider_mode = GET_MODE_WIDER_MODE (mode);
7199
      for (j = i + 1; j < var->n_var_parts; j++)
7200
        if (last_limit <= var->var_part[j].offset)
7201
          break;
7202
      if (j < var->n_var_parts
7203
          && wider_mode != VOIDmode
7204
          && var->var_part[j].cur_loc
7205
          && mode == GET_MODE (var->var_part[j].cur_loc)
7206
          && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
7207
          && last_limit == var->var_part[j].offset
7208
          && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
7209
          && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
7210
        {
7211
          rtx new_loc = NULL;
7212
 
7213
          if (REG_P (loc[n_var_parts])
7214
              && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
7215
                 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
7216
              && end_hard_regno (mode, REGNO (loc[n_var_parts]))
7217
                 == REGNO (loc2))
7218
            {
7219
              if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
7220
                new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
7221
                                           mode, 0);
7222
              else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
7223
                new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
7224
              if (new_loc)
7225
                {
7226
                  if (!REG_P (new_loc)
7227
                      || REGNO (new_loc) != REGNO (loc[n_var_parts]))
7228
                    new_loc = NULL;
7229
                  else
7230
                    REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
7231
                }
7232
            }
7233
          else if (MEM_P (loc[n_var_parts])
7234
                   && GET_CODE (XEXP (loc2, 0)) == PLUS
7235
                   && REG_P (XEXP (XEXP (loc2, 0), 0))
7236
                   && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
7237
            {
7238
              if ((REG_P (XEXP (loc[n_var_parts], 0))
7239
                   && rtx_equal_p (XEXP (loc[n_var_parts], 0),
7240
                                   XEXP (XEXP (loc2, 0), 0))
7241
                   && INTVAL (XEXP (XEXP (loc2, 0), 1))
7242
                      == GET_MODE_SIZE (mode))
7243
                  || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
7244
                      && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
7245
                      && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
7246
                                      XEXP (XEXP (loc2, 0), 0))
7247
                      && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
7248
                         + GET_MODE_SIZE (mode)
7249
                         == INTVAL (XEXP (XEXP (loc2, 0), 1))))
7250
                new_loc = adjust_address_nv (loc[n_var_parts],
7251
                                             wider_mode, 0);
7252
            }
7253
 
7254
          if (new_loc)
7255
            {
7256
              loc[n_var_parts] = new_loc;
7257
              mode = wider_mode;
7258
              last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
7259
              i = j;
7260
            }
7261
        }
7262
      ++n_var_parts;
7263
    }
7264
  type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
7265
  if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
7266
    complete = false;
7267
 
7268
  if (! flag_var_tracking_uninit)
7269
    initialized = VAR_INIT_STATUS_INITIALIZED;
7270
 
7271
  note_vl = NULL_RTX;
7272
  if (!complete)
7273
    note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
7274
                                    (int) initialized);
7275
  else if (n_var_parts == 1)
7276
    {
7277
      rtx expr_list;
7278
 
7279
      if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
7280
        expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
7281
      else
7282
        expr_list = loc[0];
7283
 
7284
      note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
7285
                                      (int) initialized);
7286
    }
7287
  else if (n_var_parts)
7288
    {
7289
      rtx parallel;
7290
 
7291
      for (i = 0; i < n_var_parts; i++)
7292
        loc[i]
7293
          = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
7294
 
7295
      parallel = gen_rtx_PARALLEL (VOIDmode,
7296
                                   gen_rtvec_v (n_var_parts, loc));
7297
      note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
7298
                                      parallel, (int) initialized);
7299
    }
7300
 
7301
#ifdef ENABLE_RTL_CHECKING
7302
  if (note_vl)
7303
    {
7304
      void **note_slot = pointer_map_insert (emitted_notes, decl);
7305
      rtx pnote = (rtx) *note_slot;
7306
      if (!var->cur_loc_changed && (pnote || PAT_VAR_LOCATION_LOC (note_vl)))
7307
        {
7308
          gcc_assert (pnote);
7309
          gcc_assert (rtx_equal_p (PAT_VAR_LOCATION_LOC (pnote),
7310
                                   PAT_VAR_LOCATION_LOC (note_vl)));
7311
        }
7312
      *note_slot = (void *) note_vl;
7313
    }
7314
  if (!var->cur_loc_changed)
7315
    goto clear;
7316
#endif
7317
 
7318
  if (where != EMIT_NOTE_BEFORE_INSN)
7319
    {
7320
      note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
7321
      if (where == EMIT_NOTE_AFTER_CALL_INSN)
7322
        NOTE_DURING_CALL_P (note) = true;
7323
    }
7324
  else
7325
    note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
7326
  NOTE_VAR_LOCATION (note) = note_vl;
7327
 
7328
 clear:
7329
  set_dv_changed (var->dv, false);
7330
  var->cur_loc_changed = false;
7331
  gcc_assert (var->in_changed_variables);
7332
  var->in_changed_variables = false;
7333
  htab_clear_slot (changed_variables, varp);
7334
 
7335
  /* Continue traversing the hash table.  */
7336
  return 1;
7337
 
7338
 value_or_debug_decl:
7339
  if (dv_changed_p (var->dv) && var->n_var_parts)
7340
    {
7341
      location_chain lc;
7342
      bool cur_loc_changed;
7343
 
7344
      if (var->var_part[0].cur_loc
7345
          && vt_expand_loc_dummy (var->var_part[0].cur_loc, vars,
7346
                                  &cur_loc_changed))
7347
        goto clear;
7348
      for (lc = var->var_part[0].loc_chain; lc; lc = lc->next)
7349
        if (lc->loc != var->var_part[0].cur_loc
7350
            && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
7351
          break;
7352
      var->var_part[0].cur_loc = lc ? lc->loc : NULL_RTX;
7353
    }
7354
  goto clear;
7355
}
7356
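 
/* For illustration only: the adjacency test used when merging two
   memory parts above, reduced to plain integers.  OFF1 and OFF2 stand
   for the constant terms of the two addresses and SIZE for
   GET_MODE_SIZE of the narrower mode; the names are hypothetical.  */

static inline bool
example_mem_parts_adjacent_p (HOST_WIDE_INT off1, HOST_WIDE_INT off2,
                              HOST_WIDE_INT size)
{
  /* E.g. off1 == -16, off2 == -12, size == 4 (SImode): mergeable.  */
  return off1 + size == off2;
}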
 
7357
DEF_VEC_P (variable);
7358
DEF_VEC_ALLOC_P (variable, heap);
7359
 
7360
/* Stack of variable_def pointers that need processing with
7361
   check_changed_vars_2.  */
7362
 
7363
static VEC (variable, heap) *changed_variables_stack;
7364
 
7365
/* VALUEs with no variables that need set_dv_changed (val, false)
7366
   called before check_changed_vars_3.  */
7367
 
7368
static VEC (rtx, heap) *changed_values_stack;
7369
 
7370
/* Helper function for check_changed_vars_1 and check_changed_vars_2.  */
7371
 
7372
static void
7373
check_changed_vars_0 (decl_or_value dv, htab_t htab)
7374
{
7375
  value_chain vc
7376
    = (value_chain) htab_find_with_hash (value_chains, dv, dv_htab_hash (dv));
7377
 
7378
  if (vc == NULL)
7379
    return;
7380
  for (vc = vc->next; vc; vc = vc->next)
7381
    if (!dv_changed_p (vc->dv))
7382
      {
7383
        variable vcvar
7384
          = (variable) htab_find_with_hash (htab, vc->dv,
7385
                                            dv_htab_hash (vc->dv));
7386
        if (vcvar)
7387
          {
7388
            set_dv_changed (vc->dv, true);
7389
            VEC_safe_push (variable, heap, changed_variables_stack, vcvar);
7390
          }
7391
        else if (dv_is_value_p (vc->dv))
7392
          {
7393
            set_dv_changed (vc->dv, true);
7394
            VEC_safe_push (rtx, heap, changed_values_stack,
7395
                           dv_as_value (vc->dv));
7396
            check_changed_vars_0 (vc->dv, htab);
7397
          }
7398
      }
7399
}
7400
 
7401
/* Populate changed_variables_stack with variable_def pointers
7402
   that need variable_was_changed called on them.  */
7403
 
7404
static int
7405
check_changed_vars_1 (void **slot, void *data)
7406
{
7407
  variable var = (variable) *slot;
7408
  htab_t htab = (htab_t) data;
7409
 
7410
  if (dv_is_value_p (var->dv)
7411
      || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7412
    check_changed_vars_0 (var->dv, htab);
7413
  return 1;
7414
}
7415
 
7416
/* Add VAR to changed_variables, and for VALUEs also recursively add
7417
   all DVs that aren't in changed_variables yet but reference the
7418
   VALUE from their loc_chain.  */
7419
 
7420
static void
7421
check_changed_vars_2 (variable var, htab_t htab)
7422
{
7423
  variable_was_changed (var, NULL);
7424
  if (dv_is_value_p (var->dv)
7425
      || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7426
    check_changed_vars_0 (var->dv, htab);
7427
}
7428
 
7429
/* For each changed decl (except DEBUG_EXPR_DECLs) recompute
7430
   cur_loc if needed (and the cur_loc of all VALUEs and
7431
   DEBUG_EXPR_DECLs it needs that are also in changed_variables), and
7432
   track whether cur_loc (or anything it uses to compute the location)
7433
   had to change during the current emit_notes_for_changes call.  */
7434
 
7435
static int
7436
check_changed_vars_3 (void **slot, void *data)
7437
{
7438
  variable var = (variable) *slot;
7439
  htab_t vars = (htab_t) data;
7440
  int i;
7441
  location_chain lc;
7442
  bool cur_loc_changed;
7443
 
7444
  if (dv_is_value_p (var->dv)
7445
      || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL)
7446
    return 1;
7447
 
7448
  for (i = 0; i < var->n_var_parts; i++)
7449
    {
7450
      if (var->var_part[i].cur_loc
7451
          && vt_expand_loc_dummy (var->var_part[i].cur_loc, vars,
7452
                                  &cur_loc_changed))
7453
        {
7454
          if (cur_loc_changed)
7455
            var->cur_loc_changed = true;
7456
          continue;
7457
        }
7458
      for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
7459
        if (lc->loc != var->var_part[i].cur_loc
7460
            && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed))
7461
          break;
7462
      if (lc || var->var_part[i].cur_loc)
7463
        var->cur_loc_changed = true;
7464
      var->var_part[i].cur_loc = lc ? lc->loc : NULL_RTX;
7465
    }
7466
  if (var->n_var_parts == 0)
7467
    var->cur_loc_changed = true;
7468
  return 1;
7469
}
7470
 
7471
/* Emit a NOTE_INSN_VAR_LOCATION note for each variable from the chain
7473
   CHANGED_VARIABLES and delete this chain.  WHERE specifies whether the notes
7474
   shall be emitted before or after instruction INSN.  */
7474
 
7475
static void
7476
emit_notes_for_changes (rtx insn, enum emit_note_where where,
7477
                        shared_hash vars)
7478
{
7479
  emit_note_data data;
7480
  htab_t htab = shared_hash_htab (vars);
7481
 
7482
  if (!htab_elements (changed_variables))
7483
    return;
7484
 
7485
  if (MAY_HAVE_DEBUG_INSNS)
7486
    {
7487
      /* Unfortunately this has to be done in two steps, because
7488
         we can't traverse a hashtab into which we are inserting
7489
         through variable_was_changed.  */
7490
      htab_traverse (changed_variables, check_changed_vars_1, htab);
7491
      while (VEC_length (variable, changed_variables_stack) > 0)
7492
        check_changed_vars_2 (VEC_pop (variable, changed_variables_stack),
7493
                              htab);
7494
      while (VEC_length (rtx, changed_values_stack) > 0)
7495
        set_dv_changed (dv_from_value (VEC_pop (rtx, changed_values_stack)),
7496
                        false);
7497
      htab_traverse (changed_variables, check_changed_vars_3, htab);
7498
    }
7499
 
7500
  data.insn = insn;
7501
  data.where = where;
7502
  data.vars = htab;
7503
 
7504
  htab_traverse (changed_variables, emit_note_insn_var_location, &data);
7505
}
7506
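 
/* For illustration only: the "collect first, mutate later" discipline
   used above, in miniature.  Because a hash table must not be grown
   while htab_traverse walks it, the first pass merely remembers what
   needs work and a second loop does the actual updates.  The table
   here is a plain int array; all names are hypothetical.  */

static void
example_two_phase_update (int *table, int n)
{
  int worklist[16];
  int n_work = 0, i;

  /* Phase 1: scan without modifying, remembering the work.  */
  for (i = 0; i < n && n_work < 16; i++)
    if (table[i] < 0)
      worklist[n_work++] = i;

  /* Phase 2: it is now safe to modify the table.  */
  while (n_work > 0)
    table[worklist[--n_work]] = 0;
}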
 
7507
/* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
7508
   same variable in hash table DATA or is not there at all.  */
7509
 
7510
static int
7511
emit_notes_for_differences_1 (void **slot, void *data)
7512
{
7513
  htab_t new_vars = (htab_t) data;
7514
  variable old_var, new_var;
7515
 
7516
  old_var = (variable) *slot;
7517
  new_var = (variable) htab_find_with_hash (new_vars, old_var->dv,
7518
                                            dv_htab_hash (old_var->dv));
7519
 
7520
  if (!new_var)
7521
    {
7522
      /* Variable has disappeared.  */
7523
      variable empty_var;
7524
 
7525
      empty_var = (variable) pool_alloc (dv_pool (old_var->dv));
7526
      empty_var->dv = old_var->dv;
7527
      empty_var->refcount = 0;
7528
      empty_var->n_var_parts = 0;
7529
      empty_var->cur_loc_changed = false;
7530
      empty_var->in_changed_variables = false;
7531
      if (dv_onepart_p (old_var->dv))
7532
        {
7533
          location_chain lc;
7534
 
7535
          gcc_assert (old_var->n_var_parts == 1);
7536
          for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next)
7537
            remove_value_chains (old_var->dv, lc->loc);
7538
        }
7539
      variable_was_changed (empty_var, NULL);
7540
      /* Continue traversing the hash table.  */
7541
      return 1;
7542
    }
7543
  if (variable_different_p (old_var, new_var))
7544
    {
7545
      if (dv_onepart_p (old_var->dv))
7546
        {
7547
          location_chain lc1, lc2;
7548
 
7549
          gcc_assert (old_var->n_var_parts == 1
7550
                      && new_var->n_var_parts == 1);
7551
          lc1 = old_var->var_part[0].loc_chain;
7552
          lc2 = new_var->var_part[0].loc_chain;
7553
          while (lc1
7554
                 && lc2
7555
                 && ((REG_P (lc1->loc) && REG_P (lc2->loc))
7556
                     || rtx_equal_p (lc1->loc, lc2->loc)))
7557
            {
7558
              lc1 = lc1->next;
7559
              lc2 = lc2->next;
7560
            }
7561
          for (; lc2; lc2 = lc2->next)
7562
            add_value_chains (old_var->dv, lc2->loc);
7563
          for (; lc1; lc1 = lc1->next)
7564
            remove_value_chains (old_var->dv, lc1->loc);
7565
        }
7566
      variable_was_changed (new_var, NULL);
7567
    }
7568
  /* Update cur_loc.  */
7569
  if (old_var != new_var)
7570
    {
7571
      int i;
7572
      for (i = 0; i < new_var->n_var_parts; i++)
7573
        {
7574
          new_var->var_part[i].cur_loc = NULL;
7575
          if (old_var->n_var_parts != new_var->n_var_parts
7576
              || old_var->var_part[i].offset != new_var->var_part[i].offset)
7577
            new_var->cur_loc_changed = true;
7578
          else if (old_var->var_part[i].cur_loc != NULL)
7579
            {
7580
              location_chain lc;
7581
              rtx cur_loc = old_var->var_part[i].cur_loc;
7582
 
7583
              for (lc = new_var->var_part[i].loc_chain; lc; lc = lc->next)
7584
                if (lc->loc == cur_loc
7585
                    || rtx_equal_p (cur_loc, lc->loc))
7586
                  {
7587
                    new_var->var_part[i].cur_loc = lc->loc;
7588
                    break;
7589
                  }
7590
              if (lc == NULL)
7591
                new_var->cur_loc_changed = true;
7592
            }
7593
        }
7594
    }
7595
 
7596
  /* Continue traversing the hash table.  */
7597
  return 1;
7598
}
7599
 
7600
/* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
7601
   table DATA.  */
7602
 
7603
static int
7604
emit_notes_for_differences_2 (void **slot, void *data)
7605
{
7606
  htab_t old_vars = (htab_t) data;
7607
  variable old_var, new_var;
7608
 
7609
  new_var = (variable) *slot;
7610
  old_var = (variable) htab_find_with_hash (old_vars, new_var->dv,
7611
                                            dv_htab_hash (new_var->dv));
7612
  if (!old_var)
7613
    {
7614
      int i;
7615
      /* Variable has appeared.  */
7616
      if (dv_onepart_p (new_var->dv))
7617
        {
7618
          location_chain lc;
7619
 
7620
          gcc_assert (new_var->n_var_parts == 1);
7621
          for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next)
7622
            add_value_chains (new_var->dv, lc->loc);
7623
        }
7624
      for (i = 0; i < new_var->n_var_parts; i++)
7625
        new_var->var_part[i].cur_loc = NULL;
7626
      variable_was_changed (new_var, NULL);
7627
    }
7628
 
7629
  /* Continue traversing the hash table.  */
7630
  return 1;
7631
}
7632
 
7633
/* Emit notes before INSN for differences between dataflow sets OLD_SET and
7634
   NEW_SET.  */
7635
 
7636
static void
7637
emit_notes_for_differences (rtx insn, dataflow_set *old_set,
7638
                            dataflow_set *new_set)
7639
{
7640
  htab_traverse (shared_hash_htab (old_set->vars),
7641
                 emit_notes_for_differences_1,
7642
                 shared_hash_htab (new_set->vars));
7643
  htab_traverse (shared_hash_htab (new_set->vars),
7644
                 emit_notes_for_differences_2,
7645
                 shared_hash_htab (old_set->vars));
7646
  emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
7647
}
7648
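 
/* For illustration only: the two-sided comparison above, reduced to
   plain integer sets.  Scanning OLD against NEW finds entries that
   disappeared; scanning NEW against OLD finds entries that appeared;
   together the two passes cover every difference, mirroring the two
   htab_traverse calls.  All names here are hypothetical.  */

static bool
example_set_contains (const int *a, int n, int x)
{
  int i;
  for (i = 0; i < n; i++)
    if (a[i] == x)
      return true;
  return false;
}

static int
example_count_differences (const int *old_set, int n_old,
                           const int *new_set, int n_new)
{
  int i, count = 0;

  for (i = 0; i < n_old; i++)
    if (!example_set_contains (new_set, n_new, old_set[i]))
      count++;        /* Disappeared.  */

  for (i = 0; i < n_new; i++)
    if (!example_set_contains (old_set, n_old, new_set[i]))
      count++;        /* Appeared.  */

  return count;
}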
 
7649
/* Emit the notes for changes of location parts in the basic block BB.  */
7650
 
7651
static void
7652
emit_notes_in_bb (basic_block bb, dataflow_set *set)
7653
{
7654
  unsigned int i;
7655
  micro_operation *mo;
7656
 
7657
  dataflow_set_clear (set);
7658
  dataflow_set_copy (set, &VTI (bb)->in);
7659
 
7660
  for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++)
7661
    {
7662
      rtx insn = mo->insn;
7663
 
7664
      switch (mo->type)
7665
        {
7666
          case MO_CALL:
7667
            dataflow_set_clear_at_call (set);
7668
            emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
7669
            break;
7670
 
7671
          case MO_USE:
7672
            {
7673
              rtx loc = mo->u.loc;
7674
 
7675
              if (REG_P (loc))
7676
                var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
7677
              else
7678
                var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
7679
 
7680
              emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
7681
            }
7682
            break;
7683
 
7684
          case MO_VAL_LOC:
7685
            {
7686
              rtx loc = mo->u.loc;
7687
              rtx val, vloc;
7688
              tree var;
7689
 
7690
              if (GET_CODE (loc) == CONCAT)
7691
                {
7692
                  val = XEXP (loc, 0);
7693
                  vloc = XEXP (loc, 1);
7694
                }
7695
              else
7696
                {
7697
                  val = NULL_RTX;
7698
                  vloc = loc;
7699
                }
7700
 
7701
              var = PAT_VAR_LOCATION_DECL (vloc);
7702
 
7703
              clobber_variable_part (set, NULL_RTX,
7704
                                     dv_from_decl (var), 0, NULL_RTX);
7705
              if (val)
7706
                {
7707
                  if (VAL_NEEDS_RESOLUTION (loc))
7708
                    val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
7709
                  set_variable_part (set, val, dv_from_decl (var), 0,
7710
                                     VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
7711
                                     INSERT);
7712
                }
7713
              else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
7714
                set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
7715
                                   dv_from_decl (var), 0,
7716
                                   VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
7717
                                   INSERT);
7718
 
7719
              emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
7720
            }
7721
            break;
7722
 
7723
          case MO_VAL_USE:
7724
            {
7725
              rtx loc = mo->u.loc;
7726
              rtx val, vloc, uloc;
7727
 
7728
              vloc = uloc = XEXP (loc, 1);
7729
              val = XEXP (loc, 0);
7730
 
7731
              if (GET_CODE (val) == CONCAT)
7732
                {
7733
                  uloc = XEXP (val, 1);
7734
                  val = XEXP (val, 0);
7735
                }
7736
 
7737
              if (VAL_NEEDS_RESOLUTION (loc))
7738
                val_resolve (set, val, vloc, insn);
7739
              else
7740
                val_store (set, val, uloc, insn, false);
7741
 
7742
              if (VAL_HOLDS_TRACK_EXPR (loc))
7743
                {
7744
                  if (GET_CODE (uloc) == REG)
7745
                    var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
7746
                                 NULL);
7747
                  else if (GET_CODE (uloc) == MEM)
7748
                    var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
7749
                                 NULL);
7750
                }
7751
 
7752
              emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
7753
            }
7754
            break;
7755
 
7756
          case MO_VAL_SET:
7757
            {
7758
              rtx loc = mo->u.loc;
7759
              rtx val, vloc, uloc, reverse = NULL_RTX;
7760
 
7761
              vloc = loc;
7762
              if (VAL_EXPR_HAS_REVERSE (loc))
7763
                {
7764
                  reverse = XEXP (loc, 1);
7765
                  vloc = XEXP (loc, 0);
7766
                }
7767
              uloc = XEXP (vloc, 1);
7768
              val = XEXP (vloc, 0);
7769
              vloc = uloc;
7770
 
7771
              if (GET_CODE (val) == CONCAT)
7772
                {
7773
                  vloc = XEXP (val, 1);
7774
                  val = XEXP (val, 0);
7775
                }
7776
 
7777
              if (GET_CODE (vloc) == SET)
7778
                {
7779
                  rtx vsrc = SET_SRC (vloc);
7780
 
7781
                  gcc_assert (val != vsrc);
7782
                  gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
7783
 
7784
                  vloc = SET_DEST (vloc);
7785
 
7786
                  if (VAL_NEEDS_RESOLUTION (loc))
7787
                    val_resolve (set, val, vsrc, insn);
7788
                }
7789
              else if (VAL_NEEDS_RESOLUTION (loc))
7790
                {
7791
                  gcc_assert (GET_CODE (uloc) == SET
7792
                              && GET_CODE (SET_SRC (uloc)) == REG);
7793
                  val_resolve (set, val, SET_SRC (uloc), insn);
7794
                }
7795
 
7796
              if (VAL_HOLDS_TRACK_EXPR (loc))
7797
                {
7798
                  if (VAL_EXPR_IS_CLOBBERED (loc))
7799
                    {
7800
                      if (REG_P (uloc))
7801
                        var_reg_delete (set, uloc, true);
7802
                      else if (MEM_P (uloc))
7803
                        var_mem_delete (set, uloc, true);
7804
                    }
7805
                  else
7806
                    {
7807
                      bool copied_p = VAL_EXPR_IS_COPIED (loc);
7808
                      rtx set_src = NULL;
7809
                      enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
7810
 
7811
                      if (GET_CODE (uloc) == SET)
7812
                        {
7813
                          set_src = SET_SRC (uloc);
7814
                          uloc = SET_DEST (uloc);
7815
                        }
7816
 
7817
                      if (copied_p)
7818
                        {
7819
                          status = find_src_status (set, set_src);
7820
 
7821
                          set_src = find_src_set_src (set, set_src);
7822
                        }
7823
 
7824
                      if (REG_P (uloc))
7825
                        var_reg_delete_and_set (set, uloc, !copied_p,
7826
                                                status, set_src);
7827
                      else if (MEM_P (uloc))
7828
                        var_mem_delete_and_set (set, uloc, !copied_p,
7829
                                                status, set_src);
7830
                    }
7831
                }
7832
              else if (REG_P (uloc))
7833
                var_regno_delete (set, REGNO (uloc));
7834
 
7835
              val_store (set, val, vloc, insn, true);
7836
 
7837
              if (reverse)
7838
                val_store (set, XEXP (reverse, 0), XEXP (reverse, 1),
7839
                           insn, false);
7840
 
7841
              emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7842
                                      set->vars);
7843
            }
7844
            break;
7845
 
7846
          case MO_SET:
7847
            {
7848
              rtx loc = mo->u.loc;
7849
              rtx set_src = NULL;
7850
 
7851
              if (GET_CODE (loc) == SET)
7852
                {
7853
                  set_src = SET_SRC (loc);
7854
                  loc = SET_DEST (loc);
7855
                }
7856
 
7857
              if (REG_P (loc))
7858
                var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7859
                                        set_src);
7860
              else
7861
                var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
7862
                                        set_src);
7863
 
7864
              emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7865
                                      set->vars);
7866
            }
7867
            break;
7868
 
7869
          case MO_COPY:
7870
            {
7871
              rtx loc = mo->u.loc;
7872
              enum var_init_status src_status;
7873
              rtx set_src = NULL;
7874
 
7875
              if (GET_CODE (loc) == SET)
7876
                {
7877
                  set_src = SET_SRC (loc);
7878
                  loc = SET_DEST (loc);
7879
                }
7880
 
7881
              src_status = find_src_status (set, set_src);
7882
              set_src = find_src_set_src (set, set_src);
7883
 
7884
              if (REG_P (loc))
7885
                var_reg_delete_and_set (set, loc, false, src_status, set_src);
7886
              else
7887
                var_mem_delete_and_set (set, loc, false, src_status, set_src);
7888
 
7889
              emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7890
                                      set->vars);
7891
            }
7892
            break;
7893
 
7894
          case MO_USE_NO_VAR:
7895
            {
7896
              rtx loc = mo->u.loc;
7897
 
7898
              if (REG_P (loc))
7899
                var_reg_delete (set, loc, false);
7900
              else
7901
                var_mem_delete (set, loc, false);
7902
 
7903
              emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
7904
            }
7905
            break;
7906
 
7907
          case MO_CLOBBER:
7908
            {
7909
              rtx loc = mo->u.loc;
7910
 
7911
              if (REG_P (loc))
7912
                var_reg_delete (set, loc, true);
7913
              else
7914
                var_mem_delete (set, loc, true);
7915
 
7916
              emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN,
7917
                                      set->vars);
7918
            }
7919
            break;
7920
 
7921
          case MO_ADJUST:
7922
            set->stack_adjust += mo->u.adjust;
7923
            break;
7924
        }
7925
    }
7926
}
7927
 
7928
/* Emit notes for the whole function.  */
7929
 
7930
static void
7931
vt_emit_notes (void)
7932
{
7933
  basic_block bb;
7934
  dataflow_set cur;
7935
 
7936
#ifdef ENABLE_RTL_CHECKING
7937
  emitted_notes = pointer_map_create ();
7938
#endif
7939
  gcc_assert (!htab_elements (changed_variables));
7940
 
7941
  /* Free memory occupied by the out hash tables, as they aren't used
7942
     anymore.  */
7943
  FOR_EACH_BB (bb)
7944
    dataflow_set_clear (&VTI (bb)->out);
7945
 
7946
  /* Enable emitting notes by functions (mainly by set_variable_part and
7947
     delete_variable_part).  */
7948
  emit_notes = true;
7949
 
7950
  if (MAY_HAVE_DEBUG_INSNS)
7951
    {
7952
      unsigned int i;
7953
      rtx val;
7954
 
7955
      for (i = 0; VEC_iterate (rtx, preserved_values, i, val); i++)
7956
        add_cselib_value_chains (dv_from_value (val));
7957
      changed_variables_stack = VEC_alloc (variable, heap, 40);
7958
      changed_values_stack = VEC_alloc (rtx, heap, 40);
7959
    }
7960
 
7961
  dataflow_set_init (&cur);
7962
 
7963
  FOR_EACH_BB (bb)
7964
    {
7965
      /* Emit the notes for changes of variable locations between two
7966
         subsequent basic blocks.  */
7967
      emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
7968
 
7969
      /* Emit the notes for the changes in the basic block itself.  */
7970
      emit_notes_in_bb (bb, &cur);
7971
 
7972
      /* Free memory occupied by the in hash table; we won't need it
7974
         again.  */
7974
      dataflow_set_clear (&VTI (bb)->in);
7975
    }
7976
#ifdef ENABLE_CHECKING
7977
  htab_traverse (shared_hash_htab (cur.vars),
7978
                 emit_notes_for_differences_1,
7979
                 shared_hash_htab (empty_shared_hash));
7980
  if (MAY_HAVE_DEBUG_INSNS)
7981
    {
7982
      unsigned int i;
7983
      rtx val;
7984
 
7985
      for (i = 0; VEC_iterate (rtx, preserved_values, i, val); i++)
7986
        remove_cselib_value_chains (dv_from_value (val));
7987
      gcc_assert (htab_elements (value_chains) == 0);
7988
    }
7989
#endif
7990
  dataflow_set_destroy (&cur);
7991
 
7992
  if (MAY_HAVE_DEBUG_INSNS)
7993
    {
7994
      VEC_free (variable, heap, changed_variables_stack);
7995
      VEC_free (rtx, heap, changed_values_stack);
7996
    }
7997
 
7998
#ifdef ENABLE_RTL_CHECKING
7999
  pointer_map_destroy (emitted_notes);
8000
#endif
8001
  emit_notes = false;
8002
}
8003
 
8004
/* If there is a declaration and offset associated with register/memory RTL,
8005
   assign the declaration to *DECLP and the offset to *OFFSETP, and return true.  */
8006
 
8007
static bool
8008
vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
8009
{
8010
  if (REG_P (rtl))
8011
    {
8012
      if (REG_ATTRS (rtl))
8013
        {
8014
          *declp = REG_EXPR (rtl);
8015
          *offsetp = REG_OFFSET (rtl);
8016
          return true;
8017
        }
8018
    }
8019
  else if (MEM_P (rtl))
8020
    {
8021
      if (MEM_ATTRS (rtl))
8022
        {
8023
          *declp = MEM_EXPR (rtl);
8024
          *offsetp = INT_MEM_OFFSET (rtl);
8025
          return true;
8026
        }
8027
    }
8028
  return false;
8029
}
8030
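 
/* For illustration only: a hypothetical caller of the helper above,
   falling back to NULL_TREE when the rtl carries no REG_ATTRS or
   MEM_ATTRS information.  Not used by the pass.  */

static tree
example_decl_for_rtl (rtx rtl)
{
  tree decl;
  HOST_WIDE_INT offset;

  if (vt_get_decl_and_offset (rtl, &decl, &offset))
    return decl;

  return NULL_TREE;
}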
 
8031
/* Insert function parameters to IN and OUT sets of ENTRY_BLOCK.  */
8032
 
8033
static void
8034
vt_add_function_parameters (void)
8035
{
8036
  tree parm;
8037
 
8038
  for (parm = DECL_ARGUMENTS (current_function_decl);
8039
       parm; parm = TREE_CHAIN (parm))
8040
    {
8041
      rtx decl_rtl = DECL_RTL_IF_SET (parm);
8042
      rtx incoming = DECL_INCOMING_RTL (parm);
8043
      tree decl;
8044
      enum machine_mode mode;
8045
      HOST_WIDE_INT offset;
8046
      dataflow_set *out;
8047
      decl_or_value dv;
8048
 
8049
      if (TREE_CODE (parm) != PARM_DECL)
8050
        continue;
8051
 
8052
      if (!DECL_NAME (parm))
8053
        continue;
8054
 
8055
      if (!decl_rtl || !incoming)
8056
        continue;
8057
 
8058
      if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
8059
        continue;
8060
 
8061
      if (!vt_get_decl_and_offset (incoming, &decl, &offset))
8062
        {
8063
          if (REG_P (incoming) || MEM_P (incoming))
8064
            {
8065
              /* This means the argument is passed by invisible reference.  */
8066
              offset = 0;
8067
              decl = parm;
8068
              incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming);
8069
            }
8070
          else
8071
            {
8072
              if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
8073
                continue;
8074
              offset += byte_lowpart_offset (GET_MODE (incoming),
8075
                                             GET_MODE (decl_rtl));
8076
            }
8077
        }
8078
 
8079
      if (!decl)
8080
        continue;
8081
 
8082
      if (parm != decl)
8083
        {
8084
          /* Assume that DECL_RTL was a pseudo that got spilled to
8085
             memory.  The spill slot sharing code will force the
8086
             memory to reference spill_slot_decl (%sfp), so we don't
8087
             match above.  That's ok, the pseudo must have referenced
8088
             the entire parameter, so just reset OFFSET.  */
8089
          gcc_assert (decl == get_spill_slot_decl (false));
8090
          offset = 0;
8091
        }
8092
 
8093
      if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
8094
        continue;
8095
 
8096
      out = &VTI (ENTRY_BLOCK_PTR)->out;
8097
 
8098
      dv = dv_from_decl (parm);
8099
 
8100
      if (target_for_debug_bind (parm)
8101
          /* We can't deal with these right now, because this kind of
8102
             variable is single-part.  ??? We could handle parallels
8103
             that describe multiple locations for the same single
8104
             value, but ATM we don't.  */
8105
          && GET_CODE (incoming) != PARALLEL)
8106
        {
8107
          cselib_val *val;
8108
 
8109
          /* ??? We shouldn't ever hit this, but it may happen because
8110
             arguments passed by invisible reference aren't dealt with
8111
             above: incoming-rtl will have Pmode rather than the
8112
             expected mode for the type.  */
8113
          if (offset)
8114
            continue;
8115
 
8116
          val = cselib_lookup (var_lowpart (mode, incoming), mode, true);
8117
 
8118
          /* ??? Float-typed values in memory are not handled by
8119
             cselib.  */
8120
          if (val)
8121
            {
8122
              preserve_value (val);
8123
              set_variable_part (out, val->val_rtx, dv, offset,
8124
                                 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8125
              dv = dv_from_value (val->val_rtx);
8126
            }
8127
        }
8128
 
8129
      if (REG_P (incoming))
8130
        {
8131
          incoming = var_lowpart (mode, incoming);
8132
          gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
8133
          attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
8134
                             incoming);
8135
          set_variable_part (out, incoming, dv, offset,
8136
                             VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8137
        }
8138
      else if (MEM_P (incoming))
8139
        {
8140
          incoming = var_lowpart (mode, incoming);
8141
          set_variable_part (out, incoming, dv, offset,
8142
                             VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
8143
        }
8144
    }
8145
 
8146
  if (MAY_HAVE_DEBUG_INSNS)
8147
    {
8148
      cselib_preserve_only_values ();
8149
      cselib_reset_table (cselib_get_next_uid ());
8150
    }
8151
 
8152
}
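
/* Note on the above: each named, non-BLKmode PARM_DECL whose incoming
   location can be tracked is recorded in the OUT set of ENTRY_BLOCK, so
   propagation starts with the parameters already bound to their incoming
   register or stack slot.  When debug insns are enabled, the incoming
   location is additionally tied to a preserved cselib VALUE, so later
   references can keep pointing at the same value.  */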

/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx.  */

static bool
fp_setter (rtx insn)
{
  rtx pat = PATTERN (insn);
  if (RTX_FRAME_RELATED_P (insn))
    {
      rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
      if (expr)
        pat = XEXP (expr, 0);
    }
  if (GET_CODE (pat) == SET)
    return SET_DEST (pat) == hard_frame_pointer_rtx;
  else if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
        if (GET_CODE (XVECEXP (pat, 0, i)) == SET
            && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx)
          return true;
    }
  return false;
}
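
/* Illustrative sketch (not part of the pass): fp_setter above answers true
   for prologue insns whose pattern, or whose REG_FRAME_RELATED_EXPR note,
   sets the hard frame pointer, e.g. something of the shape

     (set (reg hard_frame_pointer) ...)

   either on its own or as one element of a PARALLEL.  */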

/* Initialize cfa_base_rtx, create a preserved VALUE for it and
   ensure it isn't flushed during cselib_reset_table.
   Can be called only after frame_pointer_rtx (or arg_pointer_rtx,
   as appropriate) has been eliminated.  */

static void
vt_init_cfa_base (void)
{
  cselib_val *val;

#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_base_rtx = frame_pointer_rtx;
#else
  cfa_base_rtx = arg_pointer_rtx;
#endif
  if (cfa_base_rtx == hard_frame_pointer_rtx
      || !fixed_regs[REGNO (cfa_base_rtx)])
    {
      cfa_base_rtx = NULL_RTX;
      return;
    }
  if (!MAY_HAVE_DEBUG_INSNS)
    return;

  val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
                                 get_insns ());
  preserve_value (val);
  cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
  var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx,
                    VAR_INIT_STATUS_INITIALIZED, dv_from_value (val->val_rtx),
                    0, NULL_RTX, INSERT);
}
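
/* Note on the above: once a fixed CFA base register is identified, a single
   preserved VALUE stands for it for the whole function, so cselib can keep
   resolving frame addresses relative to that VALUE across the table resets
   performed in vt_initialize.  */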

/* Allocate and initialize the data structures for variable tracking
   and parse the RTL to get the micro operations.  */

static bool
vt_initialize (void)
{
  basic_block bb, prologue_bb = NULL;
  HOST_WIDE_INT fp_cfa_offset = -1;

  alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));

  attrs_pool = create_alloc_pool ("attrs_def pool",
                                  sizeof (struct attrs_def), 1024);
  var_pool = create_alloc_pool ("variable_def pool",
                                sizeof (struct variable_def)
                                + (MAX_VAR_PARTS - 1)
                                * sizeof (((variable)NULL)->var_part[0]), 64);
  loc_chain_pool = create_alloc_pool ("location_chain_def pool",
                                      sizeof (struct location_chain_def),
                                      1024);
  shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
                                        sizeof (struct shared_hash_def), 256);
  empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
  empty_shared_hash->refcount = 1;
  empty_shared_hash->htab
    = htab_create (1, variable_htab_hash, variable_htab_eq,
                   variable_htab_free);
  changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq,
                                   variable_htab_free);
  if (MAY_HAVE_DEBUG_INSNS)
    {
      value_chain_pool = create_alloc_pool ("value_chain_def pool",
                                            sizeof (struct value_chain_def),
                                            1024);
      value_chains = htab_create (32, value_chain_htab_hash,
                                  value_chain_htab_eq, NULL);
    }

  /* Init the IN and OUT sets.  */
  FOR_ALL_BB (bb)
    {
      VTI (bb)->visited = false;
      VTI (bb)->flooded = false;
      dataflow_set_init (&VTI (bb)->in);
      dataflow_set_init (&VTI (bb)->out);
      VTI (bb)->permp = NULL;
    }

  if (MAY_HAVE_DEBUG_INSNS)
    {
      cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
      scratch_regs = BITMAP_ALLOC (NULL);
      valvar_pool = create_alloc_pool ("small variable_def pool",
                                       sizeof (struct variable_def), 256);
      preserved_values = VEC_alloc (rtx, heap, 256);
    }
  else
    {
      scratch_regs = NULL;
      valvar_pool = NULL;
    }
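
  /* Determine the base that frame locations will be expressed against.
     Without a frame pointer, stack adjustments are tracked explicitly and
     the base is the stack pointer (set up via vt_init_cfa_base once the
     eliminated register resolves to it).  With a frame pointer and no
     attempted stack realignment, the hard frame pointer plus fp_cfa_offset
     becomes the base, but only from the prologue insn that sets it.  */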

  if (!frame_pointer_needed)
    {
      rtx reg, elim;

      if (!vt_stack_adjustments ())
        return false;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
#else
      reg = arg_pointer_rtx;
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
        {
          if (GET_CODE (elim) == PLUS)
            elim = XEXP (elim, 0);
          if (elim == stack_pointer_rtx)
            vt_init_cfa_base ();
        }
    }
  else if (!crtl->stack_realign_tried)
    {
      rtx reg, elim;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
      fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
      reg = arg_pointer_rtx;
      fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
        {
          if (GET_CODE (elim) == PLUS)
            {
              fp_cfa_offset -= INTVAL (XEXP (elim, 1));
              elim = XEXP (elim, 0);
            }
          if (elim != hard_frame_pointer_rtx)
            fp_cfa_offset = -1;
          else
            prologue_bb = single_succ (ENTRY_BLOCK_PTR);
        }
    }

  hard_frame_pointer_adjustment = -1;

  FOR_EACH_BB (bb)
    {
      rtx insn;
      HOST_WIDE_INT pre, post = 0;
      basic_block first_bb, last_bb;

      if (MAY_HAVE_DEBUG_INSNS)
        {
          cselib_record_sets_hook = add_with_sets;
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "first value: %i\n",
                     cselib_get_next_uid ());
        }
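
      /* Group this block with the blocks it falls through to, as long as
         each successor has a single predecessor; micro-operations are
         collected for the whole chain before the cselib table is reset,
         so VALUEs remain usable across the extended basic block.  */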

      first_bb = bb;
      for (;;)
        {
          edge e;
          if (bb->next_bb == EXIT_BLOCK_PTR
              || ! single_pred_p (bb->next_bb))
            break;
          e = find_edge (bb, bb->next_bb);
          if (! e || (e->flags & EDGE_FALLTHRU) == 0)
            break;
          bb = bb->next_bb;
        }
      last_bb = bb;

      /* Add the micro-operations to the vector.  */
      FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
        {
          HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
          VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
          for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
               insn = NEXT_INSN (insn))
            {
              if (INSN_P (insn))
                {
                  if (!frame_pointer_needed)
                    {
                      insn_stack_adjust_offset_pre_post (insn, &pre, &post);
                      if (pre)
                        {
                          micro_operation mo;
                          mo.type = MO_ADJUST;
                          mo.u.adjust = pre;
                          mo.insn = insn;
                          if (dump_file && (dump_flags & TDF_DETAILS))
                            log_op_type (PATTERN (insn), bb, insn,
                                         MO_ADJUST, dump_file);
                          VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
                                         &mo);
                          VTI (bb)->out.stack_adjust += pre;
                        }
                    }

                  cselib_hook_called = false;
                  adjust_insn (bb, insn);
                  if (MAY_HAVE_DEBUG_INSNS)
                    {
                      cselib_process_insn (insn);
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          print_rtl_single (dump_file, insn);
                          dump_cselib_table (dump_file);
                        }
                    }
                  if (!cselib_hook_called)
                    add_with_sets (insn, 0, 0);
                  cancel_changes (0);

                  if (!frame_pointer_needed && post)
                    {
                      micro_operation mo;
                      mo.type = MO_ADJUST;
                      mo.u.adjust = post;
                      mo.insn = insn;
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        log_op_type (PATTERN (insn), bb, insn,
                                     MO_ADJUST, dump_file);
                      VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
                                     &mo);
                      VTI (bb)->out.stack_adjust += post;
                    }

                  if (bb == prologue_bb
                      && hard_frame_pointer_adjustment == -1
                      && RTX_FRAME_RELATED_P (insn)
                      && fp_setter (insn))
                    {
                      vt_init_cfa_base ();
                      hard_frame_pointer_adjustment = fp_cfa_offset;
                    }
                }
            }
          gcc_assert (offset == VTI (bb)->out.stack_adjust);
        }

      bb = last_bb;

      if (MAY_HAVE_DEBUG_INSNS)
        {
          cselib_preserve_only_values ();
          cselib_reset_table (cselib_get_next_uid ());
          cselib_record_sets_hook = NULL;
        }
    }

  hard_frame_pointer_adjustment = -1;
  VTI (ENTRY_BLOCK_PTR)->flooded = true;
  vt_add_function_parameters ();
  cfa_base_rtx = NULL_RTX;
  return true;
}

/* Get rid of all debug insns from the insn stream.  */

static void
delete_debug_insns (void)
{
  basic_block bb;
  rtx insn, next;

  if (!MAY_HAVE_DEBUG_INSNS)
    return;

  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS_SAFE (bb, insn, next)
        if (DEBUG_INSN_P (insn))
          delete_insn (insn);
    }
}

/* Run a fast, BB-local only version of var tracking, to take care of
   information that we don't do global analysis on, so that not all
   information is lost.  If SKIPPED holds, we're skipping the global
   pass entirely, so we should try to use the information it would have
   handled as well.  */

static void
vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
{
  /* ??? Just skip it all for now.  */
  delete_debug_insns ();
}

/* Free the data structures needed for variable tracking.  */

static void
vt_finalize (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      VEC_free (micro_operation, heap, VTI (bb)->mos);
    }

  FOR_ALL_BB (bb)
    {
      dataflow_set_destroy (&VTI (bb)->in);
      dataflow_set_destroy (&VTI (bb)->out);
      if (VTI (bb)->permp)
        {
          dataflow_set_destroy (VTI (bb)->permp);
          XDELETE (VTI (bb)->permp);
        }
    }
  free_aux_for_blocks ();
  htab_delete (empty_shared_hash->htab);
  htab_delete (changed_variables);
  free_alloc_pool (attrs_pool);
  free_alloc_pool (var_pool);
  free_alloc_pool (loc_chain_pool);
  free_alloc_pool (shared_hash_pool);

  if (MAY_HAVE_DEBUG_INSNS)
    {
      htab_delete (value_chains);
      free_alloc_pool (value_chain_pool);
      free_alloc_pool (valvar_pool);
      VEC_free (rtx, heap, preserved_values);
      cselib_finish ();
      BITMAP_FREE (scratch_regs);
      scratch_regs = NULL;
    }

  if (vui_vec)
    XDELETEVEC (vui_vec);
  vui_vec = NULL;
  vui_allocated = 0;
}
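
/* Overview of the driver below: vt_initialize collects the micro-operations,
   vt_find_locations computes the variable locations, vt_emit_notes emits the
   location notes, and vt_finalize releases everything.  If vt_find_locations
   fails while flag_var_tracking_assignments > 0, the debug insns are deleted
   and the pass is retried once with assignment tracking turned off; the flag
   is restored afterwards by variable_tracking_main.  */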

/* The entry point to the variable tracking pass.  */

static inline unsigned int
variable_tracking_main_1 (void)
{
  bool success;

  if (flag_var_tracking_assignments < 0)
    {
      delete_debug_insns ();
      return 0;
    }

  if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
    {
      vt_debug_insns_local (true);
      return 0;
    }

  mark_dfs_back_edges ();
  if (!vt_initialize ())
    {
      vt_finalize ();
      vt_debug_insns_local (true);
      return 0;
    }

  success = vt_find_locations ();

  if (!success && flag_var_tracking_assignments > 0)
    {
      vt_finalize ();

      delete_debug_insns ();

      /* This is later restored by our caller.  */
      flag_var_tracking_assignments = 0;

      success = vt_initialize ();
      gcc_assert (success);

      success = vt_find_locations ();
    }

  if (!success)
    {
      vt_finalize ();
      vt_debug_insns_local (false);
      return 0;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_dataflow_sets ();
      dump_flow_info (dump_file, dump_flags);
    }

  vt_emit_notes ();

  vt_finalize ();
  vt_debug_insns_local (false);
  return 0;
}

unsigned int
variable_tracking_main (void)
{
  unsigned int ret;
  int save = flag_var_tracking_assignments;

  ret = variable_tracking_main_1 ();

  flag_var_tracking_assignments = save;

  return ret;
}

static bool
gate_handle_var_tracking (void)
{
  return (flag_var_tracking);
}
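
/* The pass structure below registers variable tracking as an RTL pass named
   "vartrack": it is gated on flag_var_tracking, executes
   variable_tracking_main, is timed under TV_VAR_TRACKING, and requests a
   dump plus RTL-sharing verification when it finishes.  */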

struct rtl_opt_pass pass_variable_tracking =
{
 {
  RTL_PASS,
  "vartrack",                           /* name */
  gate_handle_var_tracking,             /* gate */
  variable_tracking_main,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_VAR_TRACKING,                      /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
 }
};
