OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [trunk/] [gnu-old/] [gcc-4.2.2/] [gcc/] [function.c] - Blame information for rev 858

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 38 julius
/* Expands front end tree to back end RTL for GCC.
2
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4
   Free Software Foundation, Inc.
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify it under
9
the terms of the GNU General Public License as published by the Free
10
Software Foundation; either version 3, or (at your option) any later
11
version.
12
 
13
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14
WARRANTY; without even the implied warranty of MERCHANTABILITY or
15
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16
for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
/* This file handles the generation of rtl code from tree structure
23
   at the level of the function as a whole.
24
   It creates the rtl expressions for parameters and auto variables
25
   and has full responsibility for allocating stack slots.
26
 
27
   `expand_function_start' is called at the beginning of a function,
28
   before the function body is parsed, and `expand_function_end' is
29
   called after parsing the body.
30
 
31
   Call `assign_stack_local' to allocate a stack slot for a local variable.
32
   This is usually done during the RTL generation for the function body,
33
   but it can also be done in the reload pass when a pseudo-register does
34
   not get a hard register.  */
35
 
36
#include "config.h"
37
#include "system.h"
38
#include "coretypes.h"
39
#include "tm.h"
40
#include "rtl.h"
41
#include "tree.h"
42
#include "flags.h"
43
#include "except.h"
44
#include "function.h"
45
#include "expr.h"
46
#include "optabs.h"
47
#include "libfuncs.h"
48
#include "regs.h"
49
#include "hard-reg-set.h"
50
#include "insn-config.h"
51
#include "recog.h"
52
#include "output.h"
53
#include "basic-block.h"
54
#include "toplev.h"
55
#include "hashtab.h"
56
#include "ggc.h"
57
#include "tm_p.h"
58
#include "integrate.h"
59
#include "langhooks.h"
60
#include "target.h"
61
#include "cfglayout.h"
62
#include "tree-gimple.h"
63
#include "tree-pass.h"
64
#include "predict.h"
65
#include "vecprim.h"
66
 
67
#ifndef LOCAL_ALIGNMENT
68
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
69
#endif
70
 
71
#ifndef STACK_ALIGNMENT_NEEDED
72
#define STACK_ALIGNMENT_NEEDED 1
73
#endif
74
 
75
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
76
 
77
/* Some systems use __main in a way incompatible with its use in gcc, in these
78
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
79
   give the same symbol without quotes for an alternative entry point.  You
80
   must define both, or neither.  */
81
#ifndef NAME__MAIN
82
#define NAME__MAIN "__main"
83
#endif
84
 
85
/* Round a value to the lowest integer less than it that is a multiple of
86
   the required alignment.  Avoid using division in case the value is
87
   negative.  Assume the alignment is a power of two.  */
88
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
89
 
90
/* Similar, but round to the next highest integer that meets the
91
   alignment.  */
92
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
93
 
94
/* Nonzero if function being compiled doesn't contain any calls
95
   (ignoring the prologue and epilogue).  This is set prior to
96
   local register allocation and is valid for the remaining
97
   compiler passes.  */
98
int current_function_is_leaf;
99
 
100
/* Nonzero if function being compiled doesn't modify the stack pointer
101
   (ignoring the prologue and epilogue).  This is only valid after
102
   life_analysis has run.  */
103
int current_function_sp_is_unchanging;
104
 
105
/* Nonzero if the function being compiled is a leaf function which only
106
   uses leaf registers.  This is valid after reload (specifically after
107
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
108
int current_function_uses_only_leaf_regs;
109
 
110
/* Nonzero once virtual register instantiation has been done.
111
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
112
   calls.c:emit_library_call_value_1 uses it to set up
113
   post-instantiation libcalls.  */
114
int virtuals_instantiated;
115
 
116
/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
117
static GTY(()) int funcdef_no;
118
 
119
/* These variables hold pointers to functions to create and destroy
120
   target specific, per-function data structures.  */
121
struct machine_function * (*init_machine_status) (void);
122
 
123
/* The currently compiled function.  */
124
struct function *cfun = 0;
125
 
126
/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
127
static VEC(int,heap) *prologue;
128
static VEC(int,heap) *epilogue;
129
 
130
/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
131
   in this function.  */
132
static VEC(int,heap) *sibcall_epilogue;
133
 
134
/* In order to evaluate some expressions, such as function calls returning
135
   structures in memory, we need to temporarily allocate stack locations.
136
   We record each allocated temporary in the following structure.
137
 
138
   Associated with each temporary slot is a nesting level.  When we pop up
139
   one level, all temporaries associated with the previous level are freed.
140
   Normally, all temporaries are freed after the execution of the statement
141
   in which they were created.  However, if we are inside a ({...}) grouping,
142
   the result may be in a temporary and hence must be preserved.  If the
143
   result could be in a temporary, we preserve it if we can determine which
144
   one it is in.  If we cannot determine which temporary may contain the
145
   result, all temporaries are preserved.  A temporary is preserved by
146
   pretending it was allocated at the previous nesting level.
147
 
148
   Automatic variables are also assigned temporary slots, at the nesting
149
   level where they are defined.  They are marked a "kept" so that
150
   free_temp_slots will not free them.  */
151
 
152
struct temp_slot GTY(())
153
{
154
  /* Points to next temporary slot.  */
155
  struct temp_slot *next;
156
  /* Points to previous temporary slot.  */
157
  struct temp_slot *prev;
158
 
159
  /* The rtx to used to reference the slot.  */
160
  rtx slot;
161
  /* The rtx used to represent the address if not the address of the
162
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
163
  rtx address;
164
  /* The alignment (in bits) of the slot.  */
165
  unsigned int align;
166
  /* The size, in units, of the slot.  */
167
  HOST_WIDE_INT size;
168
  /* The type of the object in the slot, or zero if it doesn't correspond
169
     to a type.  We use this to determine whether a slot can be reused.
170
     It can be reused if objects of the type of the new slot will always
171
     conflict with objects of the type of the old slot.  */
172
  tree type;
173
  /* Nonzero if this temporary is currently in use.  */
174
  char in_use;
175
  /* Nonzero if this temporary has its address taken.  */
176
  char addr_taken;
177
  /* Nesting level at which this slot is being used.  */
178
  int level;
179
  /* Nonzero if this should survive a call to free_temp_slots.  */
180
  int keep;
181
  /* The offset of the slot from the frame_pointer, including extra space
182
     for alignment.  This info is for combine_temp_slots.  */
183
  HOST_WIDE_INT base_offset;
184
  /* The size of the slot, including extra space for alignment.  This
185
     info is for combine_temp_slots.  */
186
  HOST_WIDE_INT full_size;
187
};
188
 
189
/* Forward declarations.  */
190
 
191
static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
192
                                 struct function *);
193
static struct temp_slot *find_temp_slot_from_address (rtx);
194
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
195
static void pad_below (struct args_size *, enum machine_mode, tree);
196
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
197
static int all_blocks (tree, tree *);
198
static tree *get_block_vector (tree, int *);
199
extern tree debug_find_var_in_block_tree (tree, tree);
200
/* We always define `record_insns' even if it's not used so that we
201
   can always export `prologue_epilogue_contains'.  */
202
static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
203
static int contains (rtx, VEC(int,heap) **);
204
#ifdef HAVE_return
205
static void emit_return_into_block (basic_block, rtx);
206
#endif
207
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
208
static rtx keep_stack_depressed (rtx);
209
#endif
210
static void prepare_function_start (tree);
211
static void do_clobber_return_reg (rtx, void *);
212
static void do_use_return_reg (rtx, void *);
213
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
214
 
215
/* Pointer to chain of `struct function' for containing functions.  */
216
struct function *outer_function_chain;
217
 
218
/* Given a function decl for a containing function,
219
   return the `struct function' for it.  */
220
 
221
struct function *
222
find_function_data (tree decl)
223
{
224
  struct function *p;
225
 
226
  for (p = outer_function_chain; p; p = p->outer)
227
    if (p->decl == decl)
228
      return p;
229
 
230
  gcc_unreachable ();
231
}
232
 
233
/* Save the current context for compilation of a nested function.
234
   This is called from language-specific code.  The caller should use
235
   the enter_nested langhook to save any language-specific state,
236
   since this function knows only about language-independent
237
   variables.  */
238
 
239
void
240
push_function_context_to (tree context ATTRIBUTE_UNUSED)
241
{
242
  struct function *p;
243
 
244
  if (cfun == 0)
245
    init_dummy_function_start ();
246
  p = cfun;
247
 
248
  p->outer = outer_function_chain;
249
  outer_function_chain = p;
250
 
251
  lang_hooks.function.enter_nested (p);
252
 
253
  cfun = 0;
254
}
255
 
256
void
257
push_function_context (void)
258
{
259
  push_function_context_to (current_function_decl);
260
}
261
 
262
/* Restore the last saved context, at the end of a nested function.
263
   This function is called from language-specific code.  */
264
 
265
void
266
pop_function_context_from (tree context ATTRIBUTE_UNUSED)
267
{
268
  struct function *p = outer_function_chain;
269
 
270
  cfun = p;
271
  outer_function_chain = p->outer;
272
 
273
  current_function_decl = p->decl;
274
 
275
  lang_hooks.function.leave_nested (p);
276
 
277
  /* Reset variables that have known state during rtx generation.  */
278
  virtuals_instantiated = 0;
279
  generating_concat_p = 1;
280
}
281
 
282
void
283
pop_function_context (void)
284
{
285
  pop_function_context_from (current_function_decl);
286
}
287
 
288
/* Clear out all parts of the state in F that can safely be discarded
289
   after the function has been parsed, but not compiled, to let
290
   garbage collection reclaim the memory.  */
291
 
292
void
293
free_after_parsing (struct function *f)
294
{
295
  /* f->expr->forced_labels is used by code generation.  */
296
  /* f->emit->regno_reg_rtx is used by code generation.  */
297
  /* f->varasm is used by code generation.  */
298
  /* f->eh->eh_return_stub_label is used by code generation.  */
299
 
300
  lang_hooks.function.final (f);
301
}
302
 
303
/* Clear out all parts of the state in F that can safely be discarded
304
   after the function has been compiled, to let garbage collection
305
   reclaim the memory.  */
306
 
307
void
308
free_after_compilation (struct function *f)
309
{
310
  VEC_free (int, heap, prologue);
311
  VEC_free (int, heap, epilogue);
312
  VEC_free (int, heap, sibcall_epilogue);
313
 
314
  f->eh = NULL;
315
  f->expr = NULL;
316
  f->emit = NULL;
317
  f->varasm = NULL;
318
  f->machine = NULL;
319
  f->cfg = NULL;
320
 
321
  f->x_avail_temp_slots = NULL;
322
  f->x_used_temp_slots = NULL;
323
  f->arg_offset_rtx = NULL;
324
  f->return_rtx = NULL;
325
  f->internal_arg_pointer = NULL;
326
  f->x_nonlocal_goto_handler_labels = NULL;
327
  f->x_return_label = NULL;
328
  f->x_naked_return_label = NULL;
329
  f->x_stack_slot_list = NULL;
330
  f->x_stack_check_probe_note = NULL;
331
  f->x_arg_pointer_save_area = NULL;
332
  f->x_parm_birth_insn = NULL;
333
  f->epilogue_delay_list = NULL;
334
}
335
 
336
/* Allocate fixed slots in the stack frame of the current function.  */
337
 
338
/* Return size needed for stack frame based on slots so far allocated in
339
   function F.
340
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
341
   the caller may have to do that.  */
342
 
343
static HOST_WIDE_INT
344
get_func_frame_size (struct function *f)
345
{
346
  if (FRAME_GROWS_DOWNWARD)
347
    return -f->x_frame_offset;
348
  else
349
    return f->x_frame_offset;
350
}
351
 
352
/* Return size needed for stack frame based on slots so far allocated.
353
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
354
   the caller may have to do that.  */
355
 
356
HOST_WIDE_INT
357
get_frame_size (void)
358
{
359
  return get_func_frame_size (cfun);
360
}
361
 
362
/* Issue an error message and return TRUE if frame OFFSET overflows in
363
   the signed target pointer arithmetics for function FUNC.  Otherwise
364
   return FALSE.  */
365
 
366
bool
367
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
368
{
369
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
370
 
371
  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
372
               /* Leave room for the fixed part of the frame.  */
373
               - 64 * UNITS_PER_WORD)
374
    {
375
      error ("%Jtotal size of local objects too large", func);
376
      return TRUE;
377
    }
378
 
379
  return FALSE;
380
}
381
 
382
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
383
   with machine mode MODE.
384
 
385
   ALIGN controls the amount of alignment for the address of the slot:
386
 
387
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
388
   -2 means use BITS_PER_UNIT,
389
   positive specifies alignment boundary in bits.
390
 
391
   We do not round to stack_boundary here.
392
 
393
   FUNCTION specifies the function to allocate in.  */
394
 
395
static rtx
396
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
397
                      struct function *function)
398
{
399
  rtx x, addr;
400
  int bigend_correction = 0;
401
  unsigned int alignment;
402
  int frame_off, frame_alignment, frame_phase;
403
 
404
  if (align == 0)
405
    {
406
      tree type;
407
 
408
      if (mode == BLKmode)
409
        alignment = BIGGEST_ALIGNMENT;
410
      else
411
        alignment = GET_MODE_ALIGNMENT (mode);
412
 
413
      /* Allow the target to (possibly) increase the alignment of this
414
         stack slot.  */
415
      type = lang_hooks.types.type_for_mode (mode, 0);
416
      if (type)
417
        alignment = LOCAL_ALIGNMENT (type, alignment);
418
 
419
      alignment /= BITS_PER_UNIT;
420
    }
421
  else if (align == -1)
422
    {
423
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
424
      size = CEIL_ROUND (size, alignment);
425
    }
426
  else if (align == -2)
427
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
428
  else
429
    alignment = align / BITS_PER_UNIT;
430
 
431
  if (FRAME_GROWS_DOWNWARD)
432
    function->x_frame_offset -= size;
433
 
434
  /* Ignore alignment we can't do with expected alignment of the boundary.  */
435
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
436
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
437
 
438
  if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
439
    function->stack_alignment_needed = alignment * BITS_PER_UNIT;
440
 
441
  /* Calculate how many bytes the start of local variables is off from
442
     stack alignment.  */
443
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
444
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
445
  frame_phase = frame_off ? frame_alignment - frame_off : 0;
446
 
447
  /* Round the frame offset to the specified alignment.  The default is
448
     to always honor requests to align the stack but a port may choose to
449
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
450
  if (STACK_ALIGNMENT_NEEDED
451
      || mode != BLKmode
452
      || size != 0)
453
    {
454
      /*  We must be careful here, since FRAME_OFFSET might be negative and
455
          division with a negative dividend isn't as well defined as we might
456
          like.  So we instead assume that ALIGNMENT is a power of two and
457
          use logical operations which are unambiguous.  */
458
      if (FRAME_GROWS_DOWNWARD)
459
        function->x_frame_offset
460
          = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
461
                          (unsigned HOST_WIDE_INT) alignment)
462
             + frame_phase);
463
      else
464
        function->x_frame_offset
465
          = (CEIL_ROUND (function->x_frame_offset - frame_phase,
466
                         (unsigned HOST_WIDE_INT) alignment)
467
             + frame_phase);
468
    }
469
 
470
  /* On a big-endian machine, if we are allocating more space than we will use,
471
     use the least significant bytes of those that are allocated.  */
472
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
473
    bigend_correction = size - GET_MODE_SIZE (mode);
474
 
475
  /* If we have already instantiated virtual registers, return the actual
476
     address relative to the frame pointer.  */
477
  if (function == cfun && virtuals_instantiated)
478
    addr = plus_constant (frame_pointer_rtx,
479
                          trunc_int_for_mode
480
                          (frame_offset + bigend_correction
481
                           + STARTING_FRAME_OFFSET, Pmode));
482
  else
483
    addr = plus_constant (virtual_stack_vars_rtx,
484
                          trunc_int_for_mode
485
                          (function->x_frame_offset + bigend_correction,
486
                           Pmode));
487
 
488
  if (!FRAME_GROWS_DOWNWARD)
489
    function->x_frame_offset += size;
490
 
491
  x = gen_rtx_MEM (mode, addr);
492
  MEM_NOTRAP_P (x) = 1;
493
 
494
  function->x_stack_slot_list
495
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
496
 
497
  if (frame_offset_overflow (function->x_frame_offset, function->decl))
498
    function->x_frame_offset = 0;
499
 
500
  return x;
501
}
502
 
503
/* Wrapper around assign_stack_local_1;  assign a local stack slot for the
504
   current function.  */
505
 
506
rtx
507
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
508
{
509
  return assign_stack_local_1 (mode, size, align, cfun);
510
}
511
 
512
 
513
/* Removes temporary slot TEMP from LIST.  */
514
 
515
static void
516
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
517
{
518
  if (temp->next)
519
    temp->next->prev = temp->prev;
520
  if (temp->prev)
521
    temp->prev->next = temp->next;
522
  else
523
    *list = temp->next;
524
 
525
  temp->prev = temp->next = NULL;
526
}
527
 
528
/* Inserts temporary slot TEMP to LIST.  */
529
 
530
static void
531
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
532
{
533
  temp->next = *list;
534
  if (*list)
535
    (*list)->prev = temp;
536
  temp->prev = NULL;
537
  *list = temp;
538
}
539
 
540
/* Returns the list of used temp slots at LEVEL.  */
541
 
542
static struct temp_slot **
543
temp_slots_at_level (int level)
544
{
545
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
546
    {
547
      size_t old_length = VEC_length (temp_slot_p, used_temp_slots);
548
      temp_slot_p *p;
549
 
550
      VEC_safe_grow (temp_slot_p, gc, used_temp_slots, level + 1);
551
      p = VEC_address (temp_slot_p, used_temp_slots);
552
      memset (&p[old_length], 0,
553
              sizeof (temp_slot_p) * (level + 1 - old_length));
554
    }
555
 
556
  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
557
}
558
 
559
/* Returns the maximal temporary slot level.  */
560
 
561
static int
562
max_slot_level (void)
563
{
564
  if (!used_temp_slots)
565
    return -1;
566
 
567
  return VEC_length (temp_slot_p, used_temp_slots) - 1;
568
}
569
 
570
/* Moves temporary slot TEMP to LEVEL.  */
571
 
572
static void
573
move_slot_to_level (struct temp_slot *temp, int level)
574
{
575
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
576
  insert_slot_to_list (temp, temp_slots_at_level (level));
577
  temp->level = level;
578
}
579
 
580
/* Make temporary slot TEMP available.  */
581
 
582
static void
583
make_slot_available (struct temp_slot *temp)
584
{
585
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
586
  insert_slot_to_list (temp, &avail_temp_slots);
587
  temp->in_use = 0;
588
  temp->level = -1;
589
}
590
 
591
/* Allocate a temporary stack slot and record it for possible later
592
   reuse.
593
 
594
   MODE is the machine mode to be given to the returned rtx.
595
 
596
   SIZE is the size in units of the space required.  We do no rounding here
597
   since assign_stack_local will do any required rounding.
598
 
599
   KEEP is 1 if this slot is to be retained after a call to
600
   free_temp_slots.  Automatic variables for a block are allocated
601
   with this flag.  KEEP values of 2 or 3 were needed respectively
602
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
603
   or for SAVE_EXPRs, but they are now unused.
604
 
605
   TYPE is the type that will be used for the stack slot.  */
606
 
607
rtx
608
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
609
                            int keep, tree type)
610
{
611
  unsigned int align;
612
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
613
  rtx slot;
614
 
615
  /* If SIZE is -1 it means that somebody tried to allocate a temporary
616
     of a variable size.  */
617
  gcc_assert (size != -1);
618
 
619
  /* These are now unused.  */
620
  gcc_assert (keep <= 1);
621
 
622
  if (mode == BLKmode)
623
    align = BIGGEST_ALIGNMENT;
624
  else
625
    align = GET_MODE_ALIGNMENT (mode);
626
 
627
  if (! type)
628
    type = lang_hooks.types.type_for_mode (mode, 0);
629
 
630
  if (type)
631
    align = LOCAL_ALIGNMENT (type, align);
632
 
633
  /* Try to find an available, already-allocated temporary of the proper
634
     mode which meets the size and alignment requirements.  Choose the
635
     smallest one with the closest alignment.
636
 
637
     If assign_stack_temp is called outside of the tree->rtl expansion,
638
     we cannot reuse the stack slots (that may still refer to
639
     VIRTUAL_STACK_VARS_REGNUM).  */
640
  if (!virtuals_instantiated)
641
    {
642
      for (p = avail_temp_slots; p; p = p->next)
643
        {
644
          if (p->align >= align && p->size >= size
645
              && GET_MODE (p->slot) == mode
646
              && objects_must_conflict_p (p->type, type)
647
              && (best_p == 0 || best_p->size > p->size
648
                  || (best_p->size == p->size && best_p->align > p->align)))
649
            {
650
              if (p->align == align && p->size == size)
651
                {
652
                  selected = p;
653
                  cut_slot_from_list (selected, &avail_temp_slots);
654
                  best_p = 0;
655
                  break;
656
                }
657
              best_p = p;
658
            }
659
        }
660
    }
661
 
662
  /* Make our best, if any, the one to use.  */
663
  if (best_p)
664
    {
665
      selected = best_p;
666
      cut_slot_from_list (selected, &avail_temp_slots);
667
 
668
      /* If there are enough aligned bytes left over, make them into a new
669
         temp_slot so that the extra bytes don't get wasted.  Do this only
670
         for BLKmode slots, so that we can be sure of the alignment.  */
671
      if (GET_MODE (best_p->slot) == BLKmode)
672
        {
673
          int alignment = best_p->align / BITS_PER_UNIT;
674
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
675
 
676
          if (best_p->size - rounded_size >= alignment)
677
            {
678
              p = ggc_alloc (sizeof (struct temp_slot));
679
              p->in_use = p->addr_taken = 0;
680
              p->size = best_p->size - rounded_size;
681
              p->base_offset = best_p->base_offset + rounded_size;
682
              p->full_size = best_p->full_size - rounded_size;
683
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
684
              p->align = best_p->align;
685
              p->address = 0;
686
              p->type = best_p->type;
687
              insert_slot_to_list (p, &avail_temp_slots);
688
 
689
              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
690
                                                   stack_slot_list);
691
 
692
              best_p->size = rounded_size;
693
              best_p->full_size = rounded_size;
694
            }
695
        }
696
    }
697
 
698
  /* If we still didn't find one, make a new temporary.  */
699
  if (selected == 0)
700
    {
701
      HOST_WIDE_INT frame_offset_old = frame_offset;
702
 
703
      p = ggc_alloc (sizeof (struct temp_slot));
704
 
705
      /* We are passing an explicit alignment request to assign_stack_local.
706
         One side effect of that is assign_stack_local will not round SIZE
707
         to ensure the frame offset remains suitably aligned.
708
 
709
         So for requests which depended on the rounding of SIZE, we go ahead
710
         and round it now.  We also make sure ALIGNMENT is at least
711
         BIGGEST_ALIGNMENT.  */
712
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
713
      p->slot = assign_stack_local (mode,
714
                                    (mode == BLKmode
715
                                     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
716
                                     : size),
717
                                    align);
718
 
719
      p->align = align;
720
 
721
      /* The following slot size computation is necessary because we don't
722
         know the actual size of the temporary slot until assign_stack_local
723
         has performed all the frame alignment and size rounding for the
724
         requested temporary.  Note that extra space added for alignment
725
         can be either above or below this stack slot depending on which
726
         way the frame grows.  We include the extra space if and only if it
727
         is above this slot.  */
728
      if (FRAME_GROWS_DOWNWARD)
729
        p->size = frame_offset_old - frame_offset;
730
      else
731
        p->size = size;
732
 
733
      /* Now define the fields used by combine_temp_slots.  */
734
      if (FRAME_GROWS_DOWNWARD)
735
        {
736
          p->base_offset = frame_offset;
737
          p->full_size = frame_offset_old - frame_offset;
738
        }
739
      else
740
        {
741
          p->base_offset = frame_offset_old;
742
          p->full_size = frame_offset - frame_offset_old;
743
        }
744
      p->address = 0;
745
 
746
      selected = p;
747
    }
748
 
749
  p = selected;
750
  p->in_use = 1;
751
  p->addr_taken = 0;
752
  p->type = type;
753
  p->level = temp_slot_level;
754
  p->keep = keep;
755
 
756
  pp = temp_slots_at_level (p->level);
757
  insert_slot_to_list (p, pp);
758
 
759
  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
760
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
761
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
762
 
763
  /* If we know the alias set for the memory that will be used, use
764
     it.  If there's no TYPE, then we don't know anything about the
765
     alias set for the memory.  */
766
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
767
  set_mem_align (slot, align);
768
 
769
  /* If a type is specified, set the relevant flags.  */
770
  if (type != 0)
771
    {
772
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
773
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
774
    }
775
  MEM_NOTRAP_P (slot) = 1;
776
 
777
  return slot;
778
}
779
 
780
/* Allocate a temporary stack slot and record it for possible later
781
   reuse.  First three arguments are same as in preceding function.  */
782
 
783
rtx
784
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
785
{
786
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
787
}
788
 
789
/* Assign a temporary.
790
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
791
   and so that should be used in error messages.  In either case, we
792
   allocate of the given type.
793
   KEEP is as for assign_stack_temp.
794
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
795
   it is 0 if a register is OK.
796
   DONT_PROMOTE is 1 if we should not promote values in register
797
   to wider modes.  */
798
 
799
rtx
800
assign_temp (tree type_or_decl, int keep, int memory_required,
801
             int dont_promote ATTRIBUTE_UNUSED)
802
{
803
  tree type, decl;
804
  enum machine_mode mode;
805
#ifdef PROMOTE_MODE
806
  int unsignedp;
807
#endif
808
 
809
  if (DECL_P (type_or_decl))
810
    decl = type_or_decl, type = TREE_TYPE (decl);
811
  else
812
    decl = NULL, type = type_or_decl;
813
 
814
  mode = TYPE_MODE (type);
815
#ifdef PROMOTE_MODE
816
  unsignedp = TYPE_UNSIGNED (type);
817
#endif
818
 
819
  if (mode == BLKmode || memory_required)
820
    {
821
      HOST_WIDE_INT size = int_size_in_bytes (type);
822
      rtx tmp;
823
 
824
      /* Zero sized arrays are GNU C extension.  Set size to 1 to avoid
825
         problems with allocating the stack space.  */
826
      if (size == 0)
827
        size = 1;
828
 
829
      /* Unfortunately, we don't yet know how to allocate variable-sized
830
         temporaries.  However, sometimes we can find a fixed upper limit on
831
         the size, so try that instead.  */
832
      else if (size == -1)
833
        size = max_int_size_in_bytes (type);
834
 
835
      /* The size of the temporary may be too large to fit into an integer.  */
836
      /* ??? Not sure this should happen except for user silliness, so limit
837
         this to things that aren't compiler-generated temporaries.  The
838
         rest of the time we'll die in assign_stack_temp_for_type.  */
839
      if (decl && size == -1
840
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
841
        {
842
          error ("size of variable %q+D is too large", decl);
843
          size = 1;
844
        }
845
 
846
      tmp = assign_stack_temp_for_type (mode, size, keep, type);
847
      return tmp;
848
    }
849
 
850
#ifdef PROMOTE_MODE
851
  if (! dont_promote)
852
    mode = promote_mode (type, mode, &unsignedp, 0);
853
#endif
854
 
855
  return gen_reg_rtx (mode);
856
}
857
 
858
/* Combine temporary stack slots which are adjacent on the stack.
859
 
860
   This allows for better use of already allocated stack space.  This is only
861
   done for BLKmode slots because we can be sure that we won't have alignment
862
   problems in this case.  */
863
 
864
static void
865
combine_temp_slots (void)
866
{
867
  struct temp_slot *p, *q, *next, *next_q;
868
  int num_slots;
869
 
870
  /* We can't combine slots, because the information about which slot
871
     is in which alias set will be lost.  */
872
  if (flag_strict_aliasing)
873
    return;
874
 
875
  /* If there are a lot of temp slots, don't do anything unless
876
     high levels of optimization.  */
877
  if (! flag_expensive_optimizations)
878
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
879
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
880
        return;
881
 
882
  for (p = avail_temp_slots; p; p = next)
883
    {
884
      int delete_p = 0;
885
 
886
      next = p->next;
887
 
888
      if (GET_MODE (p->slot) != BLKmode)
889
        continue;
890
 
891
      for (q = p->next; q; q = next_q)
892
        {
893
          int delete_q = 0;
894
 
895
          next_q = q->next;
896
 
897
          if (GET_MODE (q->slot) != BLKmode)
898
            continue;
899
 
900
          if (p->base_offset + p->full_size == q->base_offset)
901
            {
902
              /* Q comes after P; combine Q into P.  */
903
              p->size += q->size;
904
              p->full_size += q->full_size;
905
              delete_q = 1;
906
            }
907
          else if (q->base_offset + q->full_size == p->base_offset)
908
            {
909
              /* P comes after Q; combine P into Q.  */
910
              q->size += p->size;
911
              q->full_size += p->full_size;
912
              delete_p = 1;
913
              break;
914
            }
915
          if (delete_q)
916
            cut_slot_from_list (q, &avail_temp_slots);
917
        }
918
 
919
      /* Either delete P or advance past it.  */
920
      if (delete_p)
921
        cut_slot_from_list (p, &avail_temp_slots);
922
    }
923
}
924
 
925
/* Find the temp slot corresponding to the object at address X.  */
926
 
927
static struct temp_slot *
928
find_temp_slot_from_address (rtx x)
929
{
930
  struct temp_slot *p;
931
  rtx next;
932
  int i;
933
 
934
  for (i = max_slot_level (); i >= 0; i--)
935
    for (p = *temp_slots_at_level (i); p; p = p->next)
936
      {
937
        if (XEXP (p->slot, 0) == x
938
            || p->address == x
939
            || (GET_CODE (x) == PLUS
940
                && XEXP (x, 0) == virtual_stack_vars_rtx
941
                && GET_CODE (XEXP (x, 1)) == CONST_INT
942
                && INTVAL (XEXP (x, 1)) >= p->base_offset
943
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
944
          return p;
945
 
946
        else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
947
          for (next = p->address; next; next = XEXP (next, 1))
948
            if (XEXP (next, 0) == x)
949
              return p;
950
      }
951
 
952
  /* If we have a sum involving a register, see if it points to a temp
953
     slot.  */
954
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
955
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
956
    return p;
957
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
958
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
959
    return p;
960
 
961
  return 0;
962
}
963
 
964
/* Indicate that NEW is an alternate way of referring to the temp slot
965
   that previously was known by OLD.  */
966
 
967
void
968
update_temp_slot_address (rtx old, rtx new)
969
{
970
  struct temp_slot *p;
971
 
972
  if (rtx_equal_p (old, new))
973
    return;
974
 
975
  p = find_temp_slot_from_address (old);
976
 
977
  /* If we didn't find one, see if both OLD is a PLUS.  If so, and NEW
978
     is a register, see if one operand of the PLUS is a temporary
979
     location.  If so, NEW points into it.  Otherwise, if both OLD and
980
     NEW are a PLUS and if there is a register in common between them.
981
     If so, try a recursive call on those values.  */
982
  if (p == 0)
983
    {
984
      if (GET_CODE (old) != PLUS)
985
        return;
986
 
987
      if (REG_P (new))
988
        {
989
          update_temp_slot_address (XEXP (old, 0), new);
990
          update_temp_slot_address (XEXP (old, 1), new);
991
          return;
992
        }
993
      else if (GET_CODE (new) != PLUS)
994
        return;
995
 
996
      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
997
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
998
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
999
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1000
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1001
        update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1002
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1003
        update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1004
 
1005
      return;
1006
    }
1007
 
1008
  /* Otherwise add an alias for the temp's address.  */
1009
  else if (p->address == 0)
1010
    p->address = new;
1011
  else
1012
    {
1013
      if (GET_CODE (p->address) != EXPR_LIST)
1014
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1015
 
1016
      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1017
    }
1018
}
1019
 
1020
/* If X could be a reference to a temporary slot, mark the fact that its
1021
   address was taken.  */
1022
 
1023
void
1024
mark_temp_addr_taken (rtx x)
1025
{
1026
  struct temp_slot *p;
1027
 
1028
  if (x == 0)
1029
    return;
1030
 
1031
  /* If X is not in memory or is at a constant address, it cannot be in
1032
     a temporary slot.  */
1033
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1034
    return;
1035
 
1036
  p = find_temp_slot_from_address (XEXP (x, 0));
1037
  if (p != 0)
1038
    p->addr_taken = 1;
1039
}
1040
 
1041
/* If X could be a reference to a temporary slot, mark that slot as
1042
   belonging to the to one level higher than the current level.  If X
1043
   matched one of our slots, just mark that one.  Otherwise, we can't
1044
   easily predict which it is, so upgrade all of them.  Kept slots
1045
   need not be touched.
1046
 
1047
   This is called when an ({...}) construct occurs and a statement
1048
   returns a value in memory.  */
1049
 
1050
void
1051
preserve_temp_slots (rtx x)
1052
{
1053
  struct temp_slot *p = 0, *next;
1054
 
1055
  /* If there is no result, we still might have some objects whose address
1056
     were taken, so we need to make sure they stay around.  */
1057
  if (x == 0)
1058
    {
1059
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1060
        {
1061
          next = p->next;
1062
 
1063
          if (p->addr_taken)
1064
            move_slot_to_level (p, temp_slot_level - 1);
1065
        }
1066
 
1067
      return;
1068
    }
1069
 
1070
  /* If X is a register that is being used as a pointer, see if we have
1071
     a temporary slot we know it points to.  To be consistent with
1072
     the code below, we really should preserve all non-kept slots
1073
     if we can't find a match, but that seems to be much too costly.  */
1074
  if (REG_P (x) && REG_POINTER (x))
1075
    p = find_temp_slot_from_address (x);
1076
 
1077
  /* If X is not in memory or is at a constant address, it cannot be in
1078
     a temporary slot, but it can contain something whose address was
1079
     taken.  */
1080
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1081
    {
1082
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1083
        {
1084
          next = p->next;
1085
 
1086
          if (p->addr_taken)
1087
            move_slot_to_level (p, temp_slot_level - 1);
1088
        }
1089
 
1090
      return;
1091
    }
1092
 
1093
  /* First see if we can find a match.  */
1094
  if (p == 0)
1095
    p = find_temp_slot_from_address (XEXP (x, 0));
1096
 
1097
  if (p != 0)
1098
    {
1099
      /* Move everything at our level whose address was taken to our new
1100
         level in case we used its address.  */
1101
      struct temp_slot *q;
1102
 
1103
      if (p->level == temp_slot_level)
1104
        {
1105
          for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1106
            {
1107
              next = q->next;
1108
 
1109
              if (p != q && q->addr_taken)
1110
                move_slot_to_level (q, temp_slot_level - 1);
1111
            }
1112
 
1113
          move_slot_to_level (p, temp_slot_level - 1);
1114
          p->addr_taken = 0;
1115
        }
1116
      return;
1117
    }
1118
 
1119
  /* Otherwise, preserve all non-kept slots at this level.  */
1120
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1121
    {
1122
      next = p->next;
1123
 
1124
      if (!p->keep)
1125
        move_slot_to_level (p, temp_slot_level - 1);
1126
    }
1127
}
1128
 
1129
/* Free all temporaries used so far.  This is normally called at the
1130
   end of generating code for a statement.  */
1131
 
1132
void
1133
free_temp_slots (void)
1134
{
1135
  struct temp_slot *p, *next;
1136
 
1137
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1138
    {
1139
      next = p->next;
1140
 
1141
      if (!p->keep)
1142
        make_slot_available (p);
1143
    }
1144
 
1145
  combine_temp_slots ();
1146
}
1147
 
1148
/* Push deeper into the nesting level for stack temporaries.  */
1149
 
1150
void
1151
push_temp_slots (void)
1152
{
1153
  temp_slot_level++;
1154
}
1155
 
1156
/* Pop a temporary nesting level.  All slots in use in the current level
1157
   are freed.  */
1158
 
1159
void
1160
pop_temp_slots (void)
1161
{
1162
  struct temp_slot *p, *next;
1163
 
1164
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1165
    {
1166
      next = p->next;
1167
      make_slot_available (p);
1168
    }
1169
 
1170
  combine_temp_slots ();
1171
 
1172
  temp_slot_level--;
1173
}
1174
 
1175
/* Initialize temporary slots.  */
1176
 
1177
void
1178
init_temp_slots (void)
1179
{
1180
  /* We have not allocated any temporaries yet.  */
1181
  avail_temp_slots = 0;
1182
  used_temp_slots = 0;
1183
  temp_slot_level = 0;
1184
}
1185
 
1186
/* These routines are responsible for converting virtual register references
1187
   to the actual hard register references once RTL generation is complete.
1188
 
1189
   The following four variables are used for communication between the
1190
   routines.  They contain the offsets of the virtual registers from their
1191
   respective hard registers.  */
1192
 
1193
static int in_arg_offset;
1194
static int var_offset;
1195
static int dynamic_offset;
1196
static int out_arg_offset;
1197
static int cfa_offset;
1198
 
1199
/* In most machines, the stack pointer register is equivalent to the bottom
1200
   of the stack.  */
1201
 
1202
#ifndef STACK_POINTER_OFFSET
1203
#define STACK_POINTER_OFFSET    0
1204
#endif
1205
 
1206
/* If not defined, pick an appropriate default for the offset of dynamically
1207
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1208
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */
1209
 
1210
#ifndef STACK_DYNAMIC_OFFSET
1211
 
1212
/* The bottom of the stack points to the actual arguments.  If
1213
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
1214
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
1215
   stack space for register parameters is not pushed by the caller, but
1216
   rather part of the fixed stack areas and hence not included in
1217
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
1218
   for it when allocating stack dynamic objects.  */
1219
 
1220
#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1221
#define STACK_DYNAMIC_OFFSET(FNDECL)    \
1222
((ACCUMULATE_OUTGOING_ARGS                                                    \
1223
  ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
1224
 + (STACK_POINTER_OFFSET))                                                    \
1225
 
1226
#else
1227
#define STACK_DYNAMIC_OFFSET(FNDECL)    \
1228
((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0)          \
1229
 + (STACK_POINTER_OFFSET))
1230
#endif
1231
#endif
1232
 
1233
 
1234
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1235
   is a virtual register, return the equivalent hard register and set the
1236
   offset indirectly through the pointer.  Otherwise, return 0.  */
1237
 
1238
static rtx
1239
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1240
{
1241
  rtx new;
1242
  HOST_WIDE_INT offset;
1243
 
1244
  if (x == virtual_incoming_args_rtx)
1245
    new = arg_pointer_rtx, offset = in_arg_offset;
1246
  else if (x == virtual_stack_vars_rtx)
1247
    new = frame_pointer_rtx, offset = var_offset;
1248
  else if (x == virtual_stack_dynamic_rtx)
1249
    new = stack_pointer_rtx, offset = dynamic_offset;
1250
  else if (x == virtual_outgoing_args_rtx)
1251
    new = stack_pointer_rtx, offset = out_arg_offset;
1252
  else if (x == virtual_cfa_rtx)
1253
    {
1254
#ifdef FRAME_POINTER_CFA_OFFSET
1255
      new = frame_pointer_rtx;
1256
#else
1257
      new = arg_pointer_rtx;
1258
#endif
1259
      offset = cfa_offset;
1260
    }
1261
  else
1262
    return NULL_RTX;
1263
 
1264
  *poffset = offset;
1265
  return new;
1266
}
1267
 
1268
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1269
   Instantiate any virtual registers present inside of *LOC.  The expression
1270
   is simplified, as much as possible, but is not to be considered "valid"
1271
   in any sense implied by the target.  If any change is made, set CHANGED
1272
   to true.  */
1273
 
1274
static int
1275
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1276
{
1277
  HOST_WIDE_INT offset;
1278
  bool *changed = (bool *) data;
1279
  rtx x, new;
1280
 
1281
  x = *loc;
1282
  if (x == 0)
1283
    return 0;
1284
 
1285
  switch (GET_CODE (x))
1286
    {
1287
    case REG:
1288
      new = instantiate_new_reg (x, &offset);
1289
      if (new)
1290
        {
1291
          *loc = plus_constant (new, offset);
1292
          if (changed)
1293
            *changed = true;
1294
        }
1295
      return -1;
1296
 
1297
    case PLUS:
1298
      new = instantiate_new_reg (XEXP (x, 0), &offset);
1299
      if (new)
1300
        {
1301
          new = plus_constant (new, offset);
1302
          *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
1303
          if (changed)
1304
            *changed = true;
1305
          return -1;
1306
        }
1307
 
1308
      /* FIXME -- from old code */
1309
          /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1310
             we can commute the PLUS and SUBREG because pointers into the
1311
             frame are well-behaved.  */
1312
      break;
1313
 
1314
    default:
1315
      break;
1316
    }
1317
 
1318
  return 0;
1319
}
1320
 
1321
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
1322
   matches the predicate for insn CODE operand OPERAND.  */
1323
 
1324
static int
1325
safe_insn_predicate (int code, int operand, rtx x)
1326
{
1327
  const struct insn_operand_data *op_data;
1328
 
1329
  if (code < 0)
1330
    return true;
1331
 
1332
  op_data = &insn_data[code].operand[operand];
1333
  if (op_data->predicate == NULL)
1334
    return true;
1335
 
1336
  return op_data->predicate (x, op_data->mode);
1337
}
1338
 
1339
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
1340
   registers present inside of insn.  The result will be a valid insn.  */
1341
 
1342
static void
1343
instantiate_virtual_regs_in_insn (rtx insn)
1344
{
1345
  HOST_WIDE_INT offset;
1346
  int insn_code, i;
1347
  bool any_change = false;
1348
  rtx set, new, x, seq;
1349
 
1350
  /* There are some special cases to be handled first.  */
1351
  set = single_set (insn);
1352
  if (set)
1353
    {
1354
      /* We're allowed to assign to a virtual register.  This is interpreted
1355
         to mean that the underlying register gets assigned the inverse
1356
         transformation.  This is used, for example, in the handling of
1357
         non-local gotos.  */
1358
      new = instantiate_new_reg (SET_DEST (set), &offset);
1359
      if (new)
1360
        {
1361
          start_sequence ();
1362
 
1363
          for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1364
          x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
1365
                                   GEN_INT (-offset));
1366
          x = force_operand (x, new);
1367
          if (x != new)
1368
            emit_move_insn (new, x);
1369
 
1370
          seq = get_insns ();
1371
          end_sequence ();
1372
 
1373
          emit_insn_before (seq, insn);
1374
          delete_insn (insn);
1375
          return;
1376
        }
1377
 
1378
      /* Handle a straight copy from a virtual register by generating a
1379
         new add insn.  The difference between this and falling through
1380
         to the generic case is avoiding a new pseudo and eliminating a
1381
         move insn in the initial rtl stream.  */
1382
      new = instantiate_new_reg (SET_SRC (set), &offset);
1383
      if (new && offset != 0
1384
          && REG_P (SET_DEST (set))
1385
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1386
        {
1387
          start_sequence ();
1388
 
1389
          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1390
                                   new, GEN_INT (offset), SET_DEST (set),
1391
                                   1, OPTAB_LIB_WIDEN);
1392
          if (x != SET_DEST (set))
1393
            emit_move_insn (SET_DEST (set), x);
1394
 
1395
          seq = get_insns ();
1396
          end_sequence ();
1397
 
1398
          emit_insn_before (seq, insn);
1399
          delete_insn (insn);
1400
          return;
1401
        }
1402
 
1403
      extract_insn (insn);
1404
      insn_code = INSN_CODE (insn);
1405
 
1406
      /* Handle a plus involving a virtual register by determining if the
1407
         operands remain valid if they're modified in place.  */
1408
      if (GET_CODE (SET_SRC (set)) == PLUS
1409
          && recog_data.n_operands >= 3
1410
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1411
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1412
          && GET_CODE (recog_data.operand[2]) == CONST_INT
1413
          && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
1414
        {
1415
          offset += INTVAL (recog_data.operand[2]);
1416
 
1417
          /* If the sum is zero, then replace with a plain move.  */
1418
          if (offset == 0
1419
              && REG_P (SET_DEST (set))
1420
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1421
            {
1422
              start_sequence ();
1423
              emit_move_insn (SET_DEST (set), new);
1424
              seq = get_insns ();
1425
              end_sequence ();
1426
 
1427
              emit_insn_before (seq, insn);
1428
              delete_insn (insn);
1429
              return;
1430
            }
1431
 
1432
          x = gen_int_mode (offset, recog_data.operand_mode[2]);
1433
 
1434
          /* Using validate_change and apply_change_group here leaves
1435
             recog_data in an invalid state.  Since we know exactly what
1436
             we want to check, do those two by hand.  */
1437
          if (safe_insn_predicate (insn_code, 1, new)
1438
              && safe_insn_predicate (insn_code, 2, x))
1439
            {
1440
              *recog_data.operand_loc[1] = recog_data.operand[1] = new;
1441
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1442
              any_change = true;
1443
 
1444
              /* Fall through into the regular operand fixup loop in
1445
                 order to take care of operands other than 1 and 2.  */
1446
            }
1447
        }
1448
    }
1449
  else
1450
    {
1451
      extract_insn (insn);
1452
      insn_code = INSN_CODE (insn);
1453
    }
1454
 
1455
  /* In the general case, we expect virtual registers to appear only in
1456
     operands, and then only as either bare registers or inside memories.  */
1457
  for (i = 0; i < recog_data.n_operands; ++i)
1458
    {
1459
      x = recog_data.operand[i];
1460
      switch (GET_CODE (x))
1461
        {
1462
        case MEM:
1463
          {
1464
            rtx addr = XEXP (x, 0);
1465
            bool changed = false;
1466
 
1467
            for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1468
            if (!changed)
1469
              continue;
1470
 
1471
            start_sequence ();
1472
            x = replace_equiv_address (x, addr);
1473
            seq = get_insns ();
1474
            end_sequence ();
1475
            if (seq)
1476
              emit_insn_before (seq, insn);
1477
          }
1478
          break;
1479
 
1480
        case REG:
1481
          new = instantiate_new_reg (x, &offset);
1482
          if (new == NULL)
1483
            continue;
1484
          if (offset == 0)
1485
            x = new;
1486
          else
1487
            {
1488
              start_sequence ();
1489
 
1490
              /* Careful, special mode predicates may have stuff in
1491
                 insn_data[insn_code].operand[i].mode that isn't useful
1492
                 to us for computing a new value.  */
1493
              /* ??? Recognize address_operand and/or "p" constraints
1494
                 to see if (plus new offset) is a valid before we put
1495
                 this through expand_simple_binop.  */
1496
              x = expand_simple_binop (GET_MODE (x), PLUS, new,
1497
                                       GEN_INT (offset), NULL_RTX,
1498
                                       1, OPTAB_LIB_WIDEN);
1499
              seq = get_insns ();
1500
              end_sequence ();
1501
              emit_insn_before (seq, insn);
1502
            }
1503
          break;
1504
 
1505
        case SUBREG:
1506
          new = instantiate_new_reg (SUBREG_REG (x), &offset);
1507
          if (new == NULL)
1508
            continue;
1509
          if (offset != 0)
1510
            {
1511
              start_sequence ();
1512
              new = expand_simple_binop (GET_MODE (new), PLUS, new,
1513
                                         GEN_INT (offset), NULL_RTX,
1514
                                         1, OPTAB_LIB_WIDEN);
1515
              seq = get_insns ();
1516
              end_sequence ();
1517
              emit_insn_before (seq, insn);
1518
            }
1519
          x = simplify_gen_subreg (recog_data.operand_mode[i], new,
1520
                                   GET_MODE (new), SUBREG_BYTE (x));
1521
          break;
1522
 
1523
        default:
1524
          continue;
1525
        }
1526
 
1527
      /* At this point, X contains the new value for the operand.
1528
         Validate the new value vs the insn predicate.  Note that
1529
         asm insns will have insn_code -1 here.  */
1530
      if (!safe_insn_predicate (insn_code, i, x))
1531
        {
1532
          start_sequence ();
1533
          x = force_reg (insn_data[insn_code].operand[i].mode, x);
1534
          seq = get_insns ();
1535
          end_sequence ();
1536
          if (seq)
1537
            emit_insn_before (seq, insn);
1538
        }
1539
 
1540
      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1541
      any_change = true;
1542
    }
1543
 
1544
  if (any_change)
1545
    {
1546
      /* Propagate operand changes into the duplicates.  */
1547
      for (i = 0; i < recog_data.n_dups; ++i)
1548
        *recog_data.dup_loc[i]
1549
          = recog_data.operand[(unsigned)recog_data.dup_num[i]];
1550
 
1551
      /* Force re-recognition of the instruction for validation.  */
1552
      INSN_CODE (insn) = -1;
1553
    }
1554
 
1555
  if (asm_noperands (PATTERN (insn)) >= 0)
1556
    {
1557
      if (!check_asm_operands (PATTERN (insn)))
1558
        {
1559
          error_for_asm (insn, "impossible constraint in %<asm%>");
1560
          delete_insn (insn);
1561
        }
1562
    }
1563
  else
1564
    {
1565
      if (recog_memoized (insn) < 0)
1566
        fatal_insn_not_found (insn);
1567
    }
1568
}
1569
 
1570
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
1571
   do any instantiation required.  */
1572
 
1573
static void
1574
instantiate_decl (rtx x)
1575
{
1576
  rtx addr;
1577
 
1578
  if (x == 0)
1579
    return;
1580
 
1581
  /* If this is a CONCAT, recurse for the pieces.  */
1582
  if (GET_CODE (x) == CONCAT)
1583
    {
1584
      instantiate_decl (XEXP (x, 0));
1585
      instantiate_decl (XEXP (x, 1));
1586
      return;
1587
    }
1588
 
1589
  /* If this is not a MEM, no need to do anything.  Similarly if the
1590
     address is a constant or a register that is not a virtual register.  */
1591
  if (!MEM_P (x))
1592
    return;
1593
 
1594
  addr = XEXP (x, 0);
1595
  if (CONSTANT_P (addr)
1596
      || (REG_P (addr)
1597
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1598
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1599
    return;
1600
 
1601
  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1602
}
1603
 
1604
/* Helper for instantiate_decls called via walk_tree: Process all decls
1605
   in the given DECL_VALUE_EXPR.  */
1606
 
1607
static tree
1608
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1609
{
1610
  tree t = *tp;
1611
  if (! EXPR_P (t))
1612
    {
1613
      *walk_subtrees = 0;
1614
      if (DECL_P (t) && DECL_RTL_SET_P (t))
1615
        instantiate_decl (DECL_RTL (t));
1616
    }
1617
  return NULL;
1618
}
1619
 
1620
/* Subroutine of instantiate_decls: Process all decls in the given
1621
   BLOCK node and all its subblocks.  */
1622
 
1623
static void
1624
instantiate_decls_1 (tree let)
1625
{
1626
  tree t;
1627
 
1628
  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1629
    {
1630
      if (DECL_RTL_SET_P (t))
1631
        instantiate_decl (DECL_RTL (t));
1632
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1633
        {
1634
          tree v = DECL_VALUE_EXPR (t);
1635
          walk_tree (&v, instantiate_expr, NULL, NULL);
1636
        }
1637
    }
1638
 
1639
  /* Process all subblocks.  */
1640
  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1641
    instantiate_decls_1 (t);
1642
}
1643
 
1644
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1645
   all virtual registers in their DECL_RTL's.  */
1646
 
1647
static void
1648
instantiate_decls (tree fndecl)
1649
{
1650
  tree decl;
1651
 
1652
  /* Process all parameters of the function.  */
1653
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1654
    {
1655
      instantiate_decl (DECL_RTL (decl));
1656
      instantiate_decl (DECL_INCOMING_RTL (decl));
1657
      if (DECL_HAS_VALUE_EXPR_P (decl))
1658
        {
1659
          tree v = DECL_VALUE_EXPR (decl);
1660
          walk_tree (&v, instantiate_expr, NULL, NULL);
1661
        }
1662
    }
1663
 
1664
  /* Now process all variables defined in the function or its subblocks.  */
1665
  instantiate_decls_1 (DECL_INITIAL (fndecl));
1666
}
1667
 
1668
/* Pass through the INSNS of function FNDECL and convert virtual register
1669
   references to hard register references.  */
1670
 
1671
static unsigned int
1672
instantiate_virtual_regs (void)
1673
{
1674
  rtx insn;
1675
 
1676
  /* Compute the offsets to use for this function.  */
1677
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1678
  var_offset = STARTING_FRAME_OFFSET;
1679
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1680
  out_arg_offset = STACK_POINTER_OFFSET;
1681
#ifdef FRAME_POINTER_CFA_OFFSET
1682
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1683
#else
1684
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1685
#endif
1686
 
1687
  /* Initialize recognition, indicating that volatile is OK.  */
1688
  init_recog ();
1689
 
1690
  /* Scan through all the insns, instantiating every virtual register still
1691
     present.  */
1692
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1693
    if (INSN_P (insn))
1694
      {
1695
        /* These patterns in the instruction stream can never be recognized.
1696
           Fortunately, they shouldn't contain virtual registers either.  */
1697
        if (GET_CODE (PATTERN (insn)) == USE
1698
            || GET_CODE (PATTERN (insn)) == CLOBBER
1699
            || GET_CODE (PATTERN (insn)) == ADDR_VEC
1700
            || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1701
            || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1702
          continue;
1703
 
1704
        instantiate_virtual_regs_in_insn (insn);
1705
 
1706
        if (INSN_DELETED_P (insn))
1707
          continue;
1708
 
1709
        for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1710
 
1711
        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
1712
        if (GET_CODE (insn) == CALL_INSN)
1713
          for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1714
                        instantiate_virtual_regs_in_rtx, NULL);
1715
      }
1716
 
1717
  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
1718
  instantiate_decls (current_function_decl);
1719
 
1720
  /* Indicate that, from now on, assign_stack_local should use
1721
     frame_pointer_rtx.  */
1722
  virtuals_instantiated = 1;
1723
  return 0;
1724
}
1725
 
1726
struct tree_opt_pass pass_instantiate_virtual_regs =
1727
{
1728
  "vregs",                              /* name */
1729
  NULL,                                 /* gate */
1730
  instantiate_virtual_regs,             /* execute */
1731
  NULL,                                 /* sub */
1732
  NULL,                                 /* next */
1733
  0,                                    /* static_pass_number */
1734
  0,                                    /* tv_id */
1735
  0,                                    /* properties_required */
1736
  0,                                    /* properties_provided */
1737
  0,                                    /* properties_destroyed */
1738
  0,                                    /* todo_flags_start */
1739
  TODO_dump_func,                       /* todo_flags_finish */
1740
 
1741
};
1742
 
1743
 
1744
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1745
   This means a type for which function calls must pass an address to the
1746
   function or get an address back from the function.
1747
   EXP may be a type node or an expression (whose type is tested).  */
1748
 
1749
int
1750
aggregate_value_p (tree exp, tree fntype)
1751
{
1752
  int i, regno, nregs;
1753
  rtx reg;
1754
 
1755
  tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1756
 
1757
  /* DECL node associated with FNTYPE when relevant, which we might need to
1758
     check for by-invisible-reference returns, typically for CALL_EXPR input
1759
     EXPressions.  */
1760
  tree fndecl = NULL_TREE;
1761
 
1762
  if (fntype)
1763
    switch (TREE_CODE (fntype))
1764
      {
1765
      case CALL_EXPR:
1766
        fndecl = get_callee_fndecl (fntype);
1767
        fntype = fndecl ? TREE_TYPE (fndecl) : 0;
1768
        break;
1769
      case FUNCTION_DECL:
1770
        fndecl = fntype;
1771
        fntype = TREE_TYPE (fndecl);
1772
        break;
1773
      case FUNCTION_TYPE:
1774
      case METHOD_TYPE:
1775
        break;
1776
      case IDENTIFIER_NODE:
1777
        fntype = 0;
1778
        break;
1779
      default:
1780
        /* We don't expect other rtl types here.  */
1781
        gcc_unreachable ();
1782
      }
1783
 
1784
  if (TREE_CODE (type) == VOID_TYPE)
1785
    return 0;
1786
 
1787
  /* If the front end has decided that this needs to be passed by
1788
     reference, do so.  */
1789
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1790
      && DECL_BY_REFERENCE (exp))
1791
    return 1;
1792
 
1793
  /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
1794
     called function RESULT_DECL, meaning the function returns in memory by
1795
     invisible reference.  This check lets front-ends not set TREE_ADDRESSABLE
1796
     on the function type, which used to be the way to request such a return
1797
     mechanism but might now be causing troubles at gimplification time if
1798
     temporaries with the function type need to be created.  */
1799
  if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
1800
      && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
1801
    return 1;
1802
 
1803
  if (targetm.calls.return_in_memory (type, fntype))
1804
    return 1;
1805
  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1806
     and thus can't be returned in registers.  */
1807
  if (TREE_ADDRESSABLE (type))
1808
    return 1;
1809
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1810
    return 1;
1811
  /* Make sure we have suitable call-clobbered regs to return
1812
     the value in; if not, we must return it in memory.  */
1813
  reg = hard_function_value (type, 0, fntype, 0);
1814
 
1815
  /* If we have something other than a REG (e.g. a PARALLEL), then assume
1816
     it is OK.  */
1817
  if (!REG_P (reg))
1818
    return 0;
1819
 
1820
  regno = REGNO (reg);
1821
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1822
  for (i = 0; i < nregs; i++)
1823
    if (! call_used_regs[regno + i])
1824
      return 1;
1825
  return 0;
1826
}
1827
 
1828
/* Return true if we should assign DECL a pseudo register; false if it
1829
   should live on the local stack.  */
1830
 
1831
bool
1832
use_register_for_decl (tree decl)
1833
{
1834
  /* Honor volatile.  */
1835
  if (TREE_SIDE_EFFECTS (decl))
1836
    return false;
1837
 
1838
  /* Honor addressability.  */
1839
  if (TREE_ADDRESSABLE (decl))
1840
    return false;
1841
 
1842
  /* Only register-like things go in registers.  */
1843
  if (DECL_MODE (decl) == BLKmode)
1844
    return false;
1845
 
1846
  /* If -ffloat-store specified, don't put explicit float variables
1847
     into registers.  */
1848
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1849
     propagates values across these stores, and it probably shouldn't.  */
1850
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1851
    return false;
1852
 
1853
  /* If we're not interested in tracking debugging information for
1854
     this decl, then we can certainly put it in a register.  */
1855
  if (DECL_IGNORED_P (decl))
1856
    return true;
1857
 
1858
  return (optimize || DECL_REGISTER (decl));
1859
}
1860
 
1861
/* Return true if TYPE should be passed by invisible reference.  */
1862
 
1863
bool
1864
pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1865
                   tree type, bool named_arg)
1866
{
1867
  if (type)
1868
    {
1869
      /* If this type contains non-trivial constructors, then it is
1870
         forbidden for the middle-end to create any new copies.  */
1871
      if (TREE_ADDRESSABLE (type))
1872
        return true;
1873
 
1874
      /* GCC post 3.4 passes *all* variable sized types by reference.  */
1875
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1876
        return true;
1877
    }
1878
 
1879
  return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1880
}
1881
 
1882
/* Return true if TYPE, which is passed by reference, should be callee
1883
   copied instead of caller copied.  */
1884
 
1885
bool
1886
reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1887
                         tree type, bool named_arg)
1888
{
1889
  if (type && TREE_ADDRESSABLE (type))
1890
    return false;
1891
  return targetm.calls.callee_copies (ca, mode, type, named_arg);
1892
}
1893
 
1894
/* Structures to communicate between the subroutines of assign_parms.
1895
   The first holds data persistent across all parameters, the second
1896
   is cleared out for each parameter.  */
1897
 
1898
struct assign_parm_data_all
1899
{
1900
  CUMULATIVE_ARGS args_so_far;
1901
  struct args_size stack_args_size;
1902
  tree function_result_decl;
1903
  tree orig_fnargs;
1904
  rtx conversion_insns;
1905
  HOST_WIDE_INT pretend_args_size;
1906
  HOST_WIDE_INT extra_pretend_bytes;
1907
  int reg_parm_stack_space;
1908
};
1909
 
1910
struct assign_parm_data_one
1911
{
1912
  tree nominal_type;
1913
  tree passed_type;
1914
  rtx entry_parm;
1915
  rtx stack_parm;
1916
  enum machine_mode nominal_mode;
1917
  enum machine_mode passed_mode;
1918
  enum machine_mode promoted_mode;
1919
  struct locate_and_pad_arg_data locate;
1920
  int partial;
1921
  BOOL_BITFIELD named_arg : 1;
1922
  BOOL_BITFIELD passed_pointer : 1;
1923
  BOOL_BITFIELD on_stack : 1;
1924
  BOOL_BITFIELD loaded_in_reg : 1;
1925
};
1926
 
1927
/* A subroutine of assign_parms.  Initialize ALL.  */
1928
 
1929
static void
1930
assign_parms_initialize_all (struct assign_parm_data_all *all)
1931
{
1932
  tree fntype;
1933
 
1934
  memset (all, 0, sizeof (*all));
1935
 
1936
  fntype = TREE_TYPE (current_function_decl);
1937
 
1938
#ifdef INIT_CUMULATIVE_INCOMING_ARGS
1939
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1940
#else
1941
  INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1942
                        current_function_decl, -1);
1943
#endif
1944
 
1945
#ifdef REG_PARM_STACK_SPACE
1946
  all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1947
#endif
1948
}
1949
 
1950
/* If ARGS contains entries with complex types, split the entry into two
1951
   entries of the component type.  Return a new list of substitutions are
1952
   needed, else the old list.  */
1953
 
1954
static tree
1955
split_complex_args (tree args)
1956
{
1957
  tree p;
1958
 
1959
  /* Before allocating memory, check for the common case of no complex.  */
1960
  for (p = args; p; p = TREE_CHAIN (p))
1961
    {
1962
      tree type = TREE_TYPE (p);
1963
      if (TREE_CODE (type) == COMPLEX_TYPE
1964
          && targetm.calls.split_complex_arg (type))
1965
        goto found;
1966
    }
1967
  return args;
1968
 
1969
 found:
1970
  args = copy_list (args);
1971
 
1972
  for (p = args; p; p = TREE_CHAIN (p))
1973
    {
1974
      tree type = TREE_TYPE (p);
1975
      if (TREE_CODE (type) == COMPLEX_TYPE
1976
          && targetm.calls.split_complex_arg (type))
1977
        {
1978
          tree decl;
1979
          tree subtype = TREE_TYPE (type);
1980
          bool addressable = TREE_ADDRESSABLE (p);
1981
 
1982
          /* Rewrite the PARM_DECL's type with its component.  */
1983
          TREE_TYPE (p) = subtype;
1984
          DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1985
          DECL_MODE (p) = VOIDmode;
1986
          DECL_SIZE (p) = NULL;
1987
          DECL_SIZE_UNIT (p) = NULL;
1988
          /* If this arg must go in memory, put it in a pseudo here.
1989
             We can't allow it to go in memory as per normal parms,
1990
             because the usual place might not have the imag part
1991
             adjacent to the real part.  */
1992
          DECL_ARTIFICIAL (p) = addressable;
1993
          DECL_IGNORED_P (p) = addressable;
1994
          TREE_ADDRESSABLE (p) = 0;
1995
          layout_decl (p, 0);
1996
 
1997
          /* Build a second synthetic decl.  */
1998
          decl = build_decl (PARM_DECL, NULL_TREE, subtype);
1999
          DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2000
          DECL_ARTIFICIAL (decl) = addressable;
2001
          DECL_IGNORED_P (decl) = addressable;
2002
          layout_decl (decl, 0);
2003
 
2004
          /* Splice it in; skip the new decl.  */
2005
          TREE_CHAIN (decl) = TREE_CHAIN (p);
2006
          TREE_CHAIN (p) = decl;
2007
          p = decl;
2008
        }
2009
    }
2010
 
2011
  return args;
2012
}
2013
 
2014
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
2015
   the hidden struct return argument, and (abi willing) complex args.
2016
   Return the new parameter list.  */
2017
 
2018
static tree
2019
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2020
{
2021
  tree fndecl = current_function_decl;
2022
  tree fntype = TREE_TYPE (fndecl);
2023
  tree fnargs = DECL_ARGUMENTS (fndecl);
2024
 
2025
  /* If struct value address is treated as the first argument, make it so.  */
2026
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2027
      && ! current_function_returns_pcc_struct
2028
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2029
    {
2030
      tree type = build_pointer_type (TREE_TYPE (fntype));
2031
      tree decl;
2032
 
2033
      decl = build_decl (PARM_DECL, NULL_TREE, type);
2034
      DECL_ARG_TYPE (decl) = type;
2035
      DECL_ARTIFICIAL (decl) = 1;
2036
      DECL_IGNORED_P (decl) = 1;
2037
 
2038
      TREE_CHAIN (decl) = fnargs;
2039
      fnargs = decl;
2040
      all->function_result_decl = decl;
2041
    }
2042
 
2043
  all->orig_fnargs = fnargs;
2044
 
2045
  /* If the target wants to split complex arguments into scalars, do so.  */
2046
  if (targetm.calls.split_complex_arg)
2047
    fnargs = split_complex_args (fnargs);
2048
 
2049
  return fnargs;
2050
}
2051
 
2052
/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
2053
   data for the parameter.  Incorporate ABI specifics such as pass-by-
2054
   reference and type promotion.  */
2055
 
2056
static void
2057
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2058
                             struct assign_parm_data_one *data)
2059
{
2060
  tree nominal_type, passed_type;
2061
  enum machine_mode nominal_mode, passed_mode, promoted_mode;
2062
 
2063
  memset (data, 0, sizeof (*data));
2064
 
2065
  /* NAMED_ARG is a mis-nomer.  We really mean 'non-varadic'. */
2066
  if (!current_function_stdarg)
2067
    data->named_arg = 1;  /* No varadic parms.  */
2068
  else if (TREE_CHAIN (parm))
2069
    data->named_arg = 1;  /* Not the last non-varadic parm. */
2070
  else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2071
    data->named_arg = 1;  /* Only varadic ones are unnamed.  */
2072
  else
2073
    data->named_arg = 0;  /* Treat as varadic.  */
2074
 
2075
  nominal_type = TREE_TYPE (parm);
2076
  passed_type = DECL_ARG_TYPE (parm);
2077
 
2078
  /* Look out for errors propagating this far.  Also, if the parameter's
2079
     type is void then its value doesn't matter.  */
2080
  if (TREE_TYPE (parm) == error_mark_node
2081
      /* This can happen after weird syntax errors
2082
         or if an enum type is defined among the parms.  */
2083
      || TREE_CODE (parm) != PARM_DECL
2084
      || passed_type == NULL
2085
      || VOID_TYPE_P (nominal_type))
2086
    {
2087
      nominal_type = passed_type = void_type_node;
2088
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
2089
      goto egress;
2090
    }
2091
 
2092
  /* Find mode of arg as it is passed, and mode of arg as it should be
2093
     during execution of this function.  */
2094
  passed_mode = TYPE_MODE (passed_type);
2095
  nominal_mode = TYPE_MODE (nominal_type);
2096
 
2097
  /* If the parm is to be passed as a transparent union, use the type of
2098
     the first field for the tests below.  We have already verified that
2099
     the modes are the same.  */
2100
  if (TREE_CODE (passed_type) == UNION_TYPE
2101
      && TYPE_TRANSPARENT_UNION (passed_type))
2102
    passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2103
 
2104
  /* See if this arg was passed by invisible reference.  */
2105
  if (pass_by_reference (&all->args_so_far, passed_mode,
2106
                         passed_type, data->named_arg))
2107
    {
2108
      passed_type = nominal_type = build_pointer_type (passed_type);
2109
      data->passed_pointer = true;
2110
      passed_mode = nominal_mode = Pmode;
2111
    }
2112
 
2113
  /* Find mode as it is passed by the ABI.  */
2114
  promoted_mode = passed_mode;
2115
  if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2116
    {
2117
      int unsignedp = TYPE_UNSIGNED (passed_type);
2118
      promoted_mode = promote_mode (passed_type, promoted_mode,
2119
                                    &unsignedp, 1);
2120
    }
2121
 
2122
 egress:
2123
  data->nominal_type = nominal_type;
2124
  data->passed_type = passed_type;
2125
  data->nominal_mode = nominal_mode;
2126
  data->passed_mode = passed_mode;
2127
  data->promoted_mode = promoted_mode;
2128
}
2129
 
2130
/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */
2131
 
2132
static void
2133
assign_parms_setup_varargs (struct assign_parm_data_all *all,
2134
                            struct assign_parm_data_one *data, bool no_rtl)
2135
{
2136
  int varargs_pretend_bytes = 0;
2137
 
2138
  targetm.calls.setup_incoming_varargs (&all->args_so_far,
2139
                                        data->promoted_mode,
2140
                                        data->passed_type,
2141
                                        &varargs_pretend_bytes, no_rtl);
2142
 
2143
  /* If the back-end has requested extra stack space, record how much is
2144
     needed.  Do not change pretend_args_size otherwise since it may be
2145
     nonzero from an earlier partial argument.  */
2146
  if (varargs_pretend_bytes > 0)
2147
    all->pretend_args_size = varargs_pretend_bytes;
2148
}
2149
 
2150
/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
2151
   the incoming location of the current parameter.  */
2152
 
2153
static void
2154
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2155
                            struct assign_parm_data_one *data)
2156
{
2157
  HOST_WIDE_INT pretend_bytes = 0;
2158
  rtx entry_parm;
2159
  bool in_regs;
2160
 
2161
  if (data->promoted_mode == VOIDmode)
2162
    {
2163
      data->entry_parm = data->stack_parm = const0_rtx;
2164
      return;
2165
    }
2166
 
2167
#ifdef FUNCTION_INCOMING_ARG
2168
  entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2169
                                      data->passed_type, data->named_arg);
2170
#else
2171
  entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2172
                             data->passed_type, data->named_arg);
2173
#endif
2174
 
2175
  if (entry_parm == 0)
2176
    data->promoted_mode = data->passed_mode;
2177
 
2178
  /* Determine parm's home in the stack, in case it arrives in the stack
2179
     or we should pretend it did.  Compute the stack position and rtx where
2180
     the argument arrives and its size.
2181
 
2182
     There is one complexity here:  If this was a parameter that would
2183
     have been passed in registers, but wasn't only because it is
2184
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2185
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2186
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2187
     as it was the previous time.  */
2188
  in_regs = entry_parm != 0;
2189
#ifdef STACK_PARMS_IN_REG_PARM_AREA
2190
  in_regs = true;
2191
#endif
2192
  if (!in_regs && !data->named_arg)
2193
    {
2194
      if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2195
        {
2196
          rtx tem;
2197
#ifdef FUNCTION_INCOMING_ARG
2198
          tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2199
                                       data->passed_type, true);
2200
#else
2201
          tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2202
                              data->passed_type, true);
2203
#endif
2204
          in_regs = tem != NULL;
2205
        }
2206
    }
2207
 
2208
  /* If this parameter was passed both in registers and in the stack, use
2209
     the copy on the stack.  */
2210
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2211
                                        data->passed_type))
2212
    entry_parm = 0;
2213
 
2214
  if (entry_parm)
2215
    {
2216
      int partial;
2217
 
2218
      partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2219
                                                 data->promoted_mode,
2220
                                                 data->passed_type,
2221
                                                 data->named_arg);
2222
      data->partial = partial;
2223
 
2224
      /* The caller might already have allocated stack space for the
2225
         register parameters.  */
2226
      if (partial != 0 && all->reg_parm_stack_space == 0)
2227
        {
2228
          /* Part of this argument is passed in registers and part
2229
             is passed on the stack.  Ask the prologue code to extend
2230
             the stack part so that we can recreate the full value.
2231
 
2232
             PRETEND_BYTES is the size of the registers we need to store.
2233
             CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2234
             stack space that the prologue should allocate.
2235
 
2236
             Internally, gcc assumes that the argument pointer is aligned
2237
             to STACK_BOUNDARY bits.  This is used both for alignment
2238
             optimizations (see init_emit) and to locate arguments that are
2239
             aligned to more than PARM_BOUNDARY bits.  We must preserve this
2240
             invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2241
             a stack boundary.  */
2242
 
2243
          /* We assume at most one partial arg, and it must be the first
2244
             argument on the stack.  */
2245
          gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2246
 
2247
          pretend_bytes = partial;
2248
          all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2249
 
2250
          /* We want to align relative to the actual stack pointer, so
2251
             don't include this in the stack size until later.  */
2252
          all->extra_pretend_bytes = all->pretend_args_size;
2253
        }
2254
    }
2255
 
2256
  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2257
                       entry_parm ? data->partial : 0, current_function_decl,
2258
                       &all->stack_args_size, &data->locate);
2259
 
2260
  /* Adjust offsets to include the pretend args.  */
2261
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2262
  data->locate.slot_offset.constant += pretend_bytes;
2263
  data->locate.offset.constant += pretend_bytes;
2264
 
2265
  data->entry_parm = entry_parm;
2266
}
2267
 
2268
/* A subroutine of assign_parms.  If there is actually space on the stack
2269
   for this parm, count it in stack_args_size and return true.  */
2270
 
2271
static bool
2272
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2273
                           struct assign_parm_data_one *data)
2274
{
2275
  /* Trivially true if we've no incoming register.  */
2276
  if (data->entry_parm == NULL)
2277
    ;
2278
  /* Also true if we're partially in registers and partially not,
2279
     since we've arranged to drop the entire argument on the stack.  */
2280
  else if (data->partial != 0)
2281
    ;
2282
  /* Also true if the target says that it's passed in both registers
2283
     and on the stack.  */
2284
  else if (GET_CODE (data->entry_parm) == PARALLEL
2285
           && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2286
    ;
2287
  /* Also true if the target says that there's stack allocated for
2288
     all register parameters.  */
2289
  else if (all->reg_parm_stack_space > 0)
2290
    ;
2291
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
2292
  else
2293
    return false;
2294
 
2295
  all->stack_args_size.constant += data->locate.size.constant;
2296
  if (data->locate.size.var)
2297
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2298
 
2299
  return true;
2300
}
2301
 
2302
/* A subroutine of assign_parms.  Given that this parameter is allocated
2303
   stack space by the ABI, find it.  */
2304
 
2305
static void
2306
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2307
{
2308
  rtx offset_rtx, stack_parm;
2309
  unsigned int align, boundary;
2310
 
2311
  /* If we're passing this arg using a reg, make its stack home the
2312
     aligned stack slot.  */
2313
  if (data->entry_parm)
2314
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2315
  else
2316
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2317
 
2318
  stack_parm = current_function_internal_arg_pointer;
2319
  if (offset_rtx != const0_rtx)
2320
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2321
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2322
 
2323
  set_mem_attributes (stack_parm, parm, 1);
2324
 
2325
  boundary = data->locate.boundary;
2326
  align = BITS_PER_UNIT;
2327
 
2328
  /* If we're padding upward, we know that the alignment of the slot
2329
     is FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
2330
     intentionally forcing upward padding.  Otherwise we have to come
2331
     up with a guess at the alignment based on OFFSET_RTX.  */
2332
  if (data->locate.where_pad != downward || data->entry_parm)
2333
    align = boundary;
2334
  else if (GET_CODE (offset_rtx) == CONST_INT)
2335
    {
2336
      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2337
      align = align & -align;
2338
    }
2339
  set_mem_align (stack_parm, align);
2340
 
2341
  if (data->entry_parm)
2342
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2343
 
2344
  data->stack_parm = stack_parm;
2345
}
2346
 
2347
/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
2348
   always valid and contiguous.  */
2349
 
2350
static void
2351
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2352
{
2353
  rtx entry_parm = data->entry_parm;
2354
  rtx stack_parm = data->stack_parm;
2355
 
2356
  /* If this parm was passed part in regs and part in memory, pretend it
2357
     arrived entirely in memory by pushing the register-part onto the stack.
2358
     In the special case of a DImode or DFmode that is split, we could put
2359
     it together in a pseudoreg directly, but for now that's not worth
2360
     bothering with.  */
2361
  if (data->partial != 0)
2362
    {
2363
      /* Handle calls that pass values in multiple non-contiguous
2364
         locations.  The Irix 6 ABI has examples of this.  */
2365
      if (GET_CODE (entry_parm) == PARALLEL)
2366
        emit_group_store (validize_mem (stack_parm), entry_parm,
2367
                          data->passed_type,
2368
                          int_size_in_bytes (data->passed_type));
2369
      else
2370
        {
2371
          gcc_assert (data->partial % UNITS_PER_WORD == 0);
2372
          move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2373
                               data->partial / UNITS_PER_WORD);
2374
        }
2375
 
2376
      entry_parm = stack_parm;
2377
    }
2378
 
2379
  /* If we didn't decide this parm came in a register, by default it came
2380
     on the stack.  */
2381
  else if (entry_parm == NULL)
2382
    entry_parm = stack_parm;
2383
 
2384
  /* When an argument is passed in multiple locations, we can't make use
2385
     of this information, but we can save some copying if the whole argument
2386
     is passed in a single register.  */
2387
  else if (GET_CODE (entry_parm) == PARALLEL
2388
           && data->nominal_mode != BLKmode
2389
           && data->passed_mode != BLKmode)
2390
    {
2391
      size_t i, len = XVECLEN (entry_parm, 0);
2392
 
2393
      for (i = 0; i < len; i++)
2394
        if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2395
            && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2396
            && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2397
                == data->passed_mode)
2398
            && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2399
          {
2400
            entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2401
            break;
2402
          }
2403
    }
2404
 
2405
  data->entry_parm = entry_parm;
2406
}
2407
 
2408
/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
2409
   always valid and properly aligned.  */
2410
 
2411
static void
2412
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2413
{
2414
  rtx stack_parm = data->stack_parm;
2415
 
2416
  /* If we can't trust the parm stack slot to be aligned enough for its
2417
     ultimate type, don't use that slot after entry.  We'll make another
2418
     stack slot, if we need one.  */
2419
  if (stack_parm
2420
      && ((STRICT_ALIGNMENT
2421
           && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2422
          || (data->nominal_type
2423
              && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2424
              && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2425
    stack_parm = NULL;
2426
 
2427
  /* If parm was passed in memory, and we need to convert it on entry,
2428
     don't store it back in that same slot.  */
2429
  else if (data->entry_parm == stack_parm
2430
           && data->nominal_mode != BLKmode
2431
           && data->nominal_mode != data->passed_mode)
2432
    stack_parm = NULL;
2433
 
2434
  /* If stack protection is in effect for this function, don't leave any
2435
     pointers in their passed stack slots.  */
2436
  else if (cfun->stack_protect_guard
2437
           && (flag_stack_protect == 2
2438
               || data->passed_pointer
2439
               || POINTER_TYPE_P (data->nominal_type)))
2440
    stack_parm = NULL;
2441
 
2442
  data->stack_parm = stack_parm;
2443
}
2444
 
2445
/* A subroutine of assign_parms.  Return true if the current parameter
2446
   should be stored as a BLKmode in the current frame.  */
2447
 
2448
static bool
2449
assign_parm_setup_block_p (struct assign_parm_data_one *data)
2450
{
2451
  if (data->nominal_mode == BLKmode)
2452
    return true;
2453
  if (GET_CODE (data->entry_parm) == PARALLEL)
2454
    return true;
2455
 
2456
#ifdef BLOCK_REG_PADDING
2457
  /* Only assign_parm_setup_block knows how to deal with register arguments
2458
     that are padded at the least significant end.  */
2459
  if (REG_P (data->entry_parm)
2460
      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2461
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2462
          == (BYTES_BIG_ENDIAN ? upward : downward)))
2463
    return true;
2464
#endif
2465
 
2466
  return false;
2467
}
2468
 
2469
/* A subroutine of assign_parms.  Arrange for the parameter to be
2470
   present and valid in DATA->STACK_RTL.  */
2471
 
2472
static void
2473
assign_parm_setup_block (struct assign_parm_data_all *all,
2474
                         tree parm, struct assign_parm_data_one *data)
2475
{
2476
  rtx entry_parm = data->entry_parm;
2477
  rtx stack_parm = data->stack_parm;
2478
  HOST_WIDE_INT size;
2479
  HOST_WIDE_INT size_stored;
2480
  rtx orig_entry_parm = entry_parm;
2481
 
2482
  if (GET_CODE (entry_parm) == PARALLEL)
2483
    entry_parm = emit_group_move_into_temps (entry_parm);
2484
 
2485
  /* If we've a non-block object that's nevertheless passed in parts,
2486
     reconstitute it in register operations rather than on the stack.  */
2487
  if (GET_CODE (entry_parm) == PARALLEL
2488
      && data->nominal_mode != BLKmode)
2489
    {
2490
      rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
2491
 
2492
      if ((XVECLEN (entry_parm, 0) > 1
2493
           || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2494
          && use_register_for_decl (parm))
2495
        {
2496
          rtx parmreg = gen_reg_rtx (data->nominal_mode);
2497
 
2498
          push_to_sequence (all->conversion_insns);
2499
 
2500
          /* For values returned in multiple registers, handle possible
2501
             incompatible calls to emit_group_store.
2502
 
2503
             For example, the following would be invalid, and would have to
2504
             be fixed by the conditional below:
2505
 
2506
             emit_group_store ((reg:SF), (parallel:DF))
2507
             emit_group_store ((reg:SI), (parallel:DI))
2508
 
2509
             An example of this are doubles in e500 v2:
2510
             (parallel:DF (expr_list (reg:SI) (const_int 0))
2511
             (expr_list (reg:SI) (const_int 4))).  */
2512
          if (data->nominal_mode != data->passed_mode)
2513
            {
2514
              rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2515
              emit_group_store (t, entry_parm, NULL_TREE,
2516
                                GET_MODE_SIZE (GET_MODE (entry_parm)));
2517
              convert_move (parmreg, t, 0);
2518
            }
2519
          else
2520
            emit_group_store (parmreg, entry_parm, data->nominal_type,
2521
                              int_size_in_bytes (data->nominal_type));
2522
 
2523
          all->conversion_insns = get_insns ();
2524
          end_sequence ();
2525
 
2526
          SET_DECL_RTL (parm, parmreg);
2527
          return;
2528
        }
2529
    }
2530
 
2531
  size = int_size_in_bytes (data->passed_type);
2532
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2533
  if (stack_parm == 0)
2534
    {
2535
      DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2536
      stack_parm = assign_stack_local (BLKmode, size_stored,
2537
                                       DECL_ALIGN (parm));
2538
      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2539
        PUT_MODE (stack_parm, GET_MODE (entry_parm));
2540
      set_mem_attributes (stack_parm, parm, 1);
2541
    }
2542
 
2543
  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
2544
     calls that pass values in multiple non-contiguous locations.  */
2545
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2546
    {
2547
      rtx mem;
2548
 
2549
      /* Note that we will be storing an integral number of words.
2550
         So we have to be careful to ensure that we allocate an
2551
         integral number of words.  We do this above when we call
2552
         assign_stack_local if space was not allocated in the argument
2553
         list.  If it was, this will not work if PARM_BOUNDARY is not
2554
         a multiple of BITS_PER_WORD.  It isn't clear how to fix this
2555
         if it becomes a problem.  Exception is when BLKmode arrives
2556
         with arguments not conforming to word_mode.  */
2557
 
2558
      if (data->stack_parm == 0)
2559
        ;
2560
      else if (GET_CODE (entry_parm) == PARALLEL)
2561
        ;
2562
      else
2563
        gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2564
 
2565
      mem = validize_mem (stack_parm);
2566
 
2567
      /* Handle values in multiple non-contiguous locations.  */
2568
      if (GET_CODE (entry_parm) == PARALLEL)
2569
        {
2570
          push_to_sequence (all->conversion_insns);
2571
          emit_group_store (mem, entry_parm, data->passed_type, size);
2572
          all->conversion_insns = get_insns ();
2573
          end_sequence ();
2574
        }
2575
 
2576
      else if (size == 0)
2577
        ;
2578
 
2579
      /* If SIZE is that of a mode no bigger than a word, just use
2580
         that mode's store operation.  */
2581
      else if (size <= UNITS_PER_WORD)
2582
        {
2583
          enum machine_mode mode
2584
            = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2585
 
2586
          if (mode != BLKmode
2587
#ifdef BLOCK_REG_PADDING
2588
              && (size == UNITS_PER_WORD
2589
                  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2590
                      != (BYTES_BIG_ENDIAN ? upward : downward)))
2591
#endif
2592
              )
2593
            {
2594
              rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2595
              emit_move_insn (change_address (mem, mode, 0), reg);
2596
            }
2597
 
2598
          /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2599
             machine must be aligned to the left before storing
2600
             to memory.  Note that the previous test doesn't
2601
             handle all cases (e.g. SIZE == 3).  */
2602
          else if (size != UNITS_PER_WORD
2603
#ifdef BLOCK_REG_PADDING
2604
                   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2605
                       == downward)
2606
#else
2607
                   && BYTES_BIG_ENDIAN
2608
#endif
2609
                   )
2610
            {
2611
              rtx tem, x;
2612
              int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2613
              rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2614
 
2615
              x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2616
                                build_int_cst (NULL_TREE, by),
2617
                                NULL_RTX, 1);
2618
              tem = change_address (mem, word_mode, 0);
2619
              emit_move_insn (tem, x);
2620
            }
2621
          else
2622
            move_block_from_reg (REGNO (entry_parm), mem,
2623
                                 size_stored / UNITS_PER_WORD);
2624
        }
2625
      else
2626
        move_block_from_reg (REGNO (entry_parm), mem,
2627
                             size_stored / UNITS_PER_WORD);
2628
    }
2629
  else if (data->stack_parm == 0)
2630
    {
2631
      push_to_sequence (all->conversion_insns);
2632
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2633
                       BLOCK_OP_NORMAL);
2634
      all->conversion_insns = get_insns ();
2635
      end_sequence ();
2636
    }
2637
 
2638
  data->stack_parm = stack_parm;
2639
  SET_DECL_RTL (parm, stack_parm);
2640
}
2641
 
2642
/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
2643
   parameter.  Get it there.  Perform all ABI specified conversions.  */
2644
 
2645
static void
2646
assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2647
                       struct assign_parm_data_one *data)
2648
{
2649
  rtx parmreg;
2650
  enum machine_mode promoted_nominal_mode;
2651
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2652
  bool did_conversion = false;
2653
 
2654
  /* Store the parm in a pseudoregister during the function, but we may
2655
     need to do it in a wider mode.  */
2656
 
2657
  /* This is not really promoting for a call.  However we need to be
2658
     consistent with assign_parm_find_data_types and expand_expr_real_1.  */
2659
  promoted_nominal_mode
2660
    = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2661
 
2662
  parmreg = gen_reg_rtx (promoted_nominal_mode);
2663
 
2664
  if (!DECL_ARTIFICIAL (parm))
2665
    mark_user_reg (parmreg);
2666
 
2667
  /* If this was an item that we received a pointer to,
2668
     set DECL_RTL appropriately.  */
2669
  if (data->passed_pointer)
2670
    {
2671
      rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2672
      set_mem_attributes (x, parm, 1);
2673
      SET_DECL_RTL (parm, x);
2674
    }
2675
  else
2676
    SET_DECL_RTL (parm, parmreg);
2677
 
2678
  /* Copy the value into the register.  */
2679
  if (data->nominal_mode != data->passed_mode
2680
      || promoted_nominal_mode != data->promoted_mode)
2681
    {
2682
      int save_tree_used;
2683
 
2684
      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2685
         mode, by the caller.  We now have to convert it to
2686
         NOMINAL_MODE, if different.  However, PARMREG may be in
2687
         a different mode than NOMINAL_MODE if it is being stored
2688
         promoted.
2689
 
2690
         If ENTRY_PARM is a hard register, it might be in a register
2691
         not valid for operating in its mode (e.g., an odd-numbered
2692
         register for a DFmode).  In that case, moves are the only
2693
         thing valid, so we can't do a convert from there.  This
2694
         occurs when the calling sequence allow such misaligned
2695
         usages.
2696
 
2697
         In addition, the conversion may involve a call, which could
2698
         clobber parameters which haven't been copied to pseudo
2699
         registers yet.  Therefore, we must first copy the parm to
2700
         a pseudo reg here, and save the conversion until after all
2701
         parameters have been moved.  */
2702
 
2703
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2704
 
2705
      emit_move_insn (tempreg, validize_mem (data->entry_parm));
2706
 
2707
      push_to_sequence (all->conversion_insns);
2708
      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2709
 
2710
      if (GET_CODE (tempreg) == SUBREG
2711
          && GET_MODE (tempreg) == data->nominal_mode
2712
          && REG_P (SUBREG_REG (tempreg))
2713
          && data->nominal_mode == data->passed_mode
2714
          && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2715
          && GET_MODE_SIZE (GET_MODE (tempreg))
2716
             < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2717
        {
2718
          /* The argument is already sign/zero extended, so note it
2719
             into the subreg.  */
2720
          SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2721
          SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2722
        }
2723
 
2724
      /* TREE_USED gets set erroneously during expand_assignment.  */
2725
      save_tree_used = TREE_USED (parm);
2726
      expand_assignment (parm, make_tree (data->nominal_type, tempreg));
2727
      TREE_USED (parm) = save_tree_used;
2728
      all->conversion_insns = get_insns ();
2729
      end_sequence ();
2730
 
2731
      did_conversion = true;
2732
    }
2733
  else
2734
    emit_move_insn (parmreg, validize_mem (data->entry_parm));
2735
 
2736
  /* If we were passed a pointer but the actual value can safely live
2737
     in a register, put it in one.  */
2738
  if (data->passed_pointer
2739
      && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2740
      /* If by-reference argument was promoted, demote it.  */
2741
      && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2742
          || use_register_for_decl (parm)))
2743
    {
2744
      /* We can't use nominal_mode, because it will have been set to
2745
         Pmode above.  We must use the actual mode of the parm.  */
2746
      parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2747
      mark_user_reg (parmreg);
2748
 
2749
      if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2750
        {
2751
          rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2752
          int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2753
 
2754
          push_to_sequence (all->conversion_insns);
2755
          emit_move_insn (tempreg, DECL_RTL (parm));
2756
          tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2757
          emit_move_insn (parmreg, tempreg);
2758
          all->conversion_insns = get_insns ();
2759
          end_sequence ();
2760
 
2761
          did_conversion = true;
2762
        }
2763
      else
2764
        emit_move_insn (parmreg, DECL_RTL (parm));
2765
 
2766
      SET_DECL_RTL (parm, parmreg);
2767
 
2768
      /* STACK_PARM is the pointer, not the parm, and PARMREG is
2769
         now the parm.  */
2770
      data->stack_parm = NULL;
2771
    }
2772
 
2773
  /* Mark the register as eliminable if we did no conversion and it was
2774
     copied from memory at a fixed offset, and the arg pointer was not
2775
     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
2776
     offset formed an invalid address, such memory-equivalences as we
2777
     make here would screw up life analysis for it.  */
2778
  if (data->nominal_mode == data->passed_mode
2779
      && !did_conversion
2780
      && data->stack_parm != 0
2781
      && MEM_P (data->stack_parm)
2782
      && data->locate.offset.var == 0
2783
      && reg_mentioned_p (virtual_incoming_args_rtx,
2784
                          XEXP (data->stack_parm, 0)))
2785
    {
2786
      rtx linsn = get_last_insn ();
2787
      rtx sinsn, set;
2788
 
2789
      /* Mark complex types separately.  */
2790
      if (GET_CODE (parmreg) == CONCAT)
2791
        {
2792
          enum machine_mode submode
2793
            = GET_MODE_INNER (GET_MODE (parmreg));
2794
          int regnor = REGNO (XEXP (parmreg, 0));
2795
          int regnoi = REGNO (XEXP (parmreg, 1));
2796
          rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2797
          rtx stacki = adjust_address_nv (data->stack_parm, submode,
2798
                                          GET_MODE_SIZE (submode));
2799
 
2800
          /* Scan backwards for the set of the real and
2801
             imaginary parts.  */
2802
          for (sinsn = linsn; sinsn != 0;
2803
               sinsn = prev_nonnote_insn (sinsn))
2804
            {
2805
              set = single_set (sinsn);
2806
              if (set == 0)
2807
                continue;
2808
 
2809
              if (SET_DEST (set) == regno_reg_rtx [regnoi])
2810
                REG_NOTES (sinsn)
2811
                  = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2812
                                       REG_NOTES (sinsn));
2813
              else if (SET_DEST (set) == regno_reg_rtx [regnor])
2814
                REG_NOTES (sinsn)
2815
                  = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2816
                                       REG_NOTES (sinsn));
2817
            }
2818
        }
2819
      else if ((set = single_set (linsn)) != 0
2820
               && SET_DEST (set) == parmreg)
2821
        REG_NOTES (linsn)
2822
          = gen_rtx_EXPR_LIST (REG_EQUIV,
2823
                               data->stack_parm, REG_NOTES (linsn));
2824
    }
2825
 
2826
  /* For pointer data type, suggest pointer register.  */
2827
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
2828
    mark_reg_pointer (parmreg,
2829
                      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2830
}
2831
 
2832
/* A subroutine of assign_parms.  Allocate stack space to hold the current
2833
   parameter.  Get it there.  Perform all ABI specified conversions.  */
2834
 
2835
static void
2836
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2837
                         struct assign_parm_data_one *data)
2838
{
2839
  /* Value must be stored in the stack slot STACK_PARM during function
2840
     execution.  */
2841
  bool to_conversion = false;
2842
 
2843
  if (data->promoted_mode != data->nominal_mode)
2844
    {
2845
      /* Conversion is required.  */
2846
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2847
 
2848
      emit_move_insn (tempreg, validize_mem (data->entry_parm));
2849
 
2850
      push_to_sequence (all->conversion_insns);
2851
      to_conversion = true;
2852
 
2853
      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2854
                                          TYPE_UNSIGNED (TREE_TYPE (parm)));
2855
 
2856
      if (data->stack_parm)
2857
        /* ??? This may need a big-endian conversion on sparc64.  */
2858
        data->stack_parm
2859
          = adjust_address (data->stack_parm, data->nominal_mode, 0);
2860
    }
2861
 
2862
  if (data->entry_parm != data->stack_parm)
2863
    {
2864
      rtx src, dest;
2865
 
2866
      if (data->stack_parm == 0)
2867
        {
2868
          data->stack_parm
2869
            = assign_stack_local (GET_MODE (data->entry_parm),
2870
                                  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2871
                                  TYPE_ALIGN (data->passed_type));
2872
          set_mem_attributes (data->stack_parm, parm, 1);
2873
        }
2874
 
2875
      dest = validize_mem (data->stack_parm);
2876
      src = validize_mem (data->entry_parm);
2877
 
2878
      if (MEM_P (src))
2879
        {
2880
          /* Use a block move to handle potentially misaligned entry_parm.  */
2881
          if (!to_conversion)
2882
            push_to_sequence (all->conversion_insns);
2883
          to_conversion = true;
2884
 
2885
          emit_block_move (dest, src,
2886
                           GEN_INT (int_size_in_bytes (data->passed_type)),
2887
                           BLOCK_OP_NORMAL);
2888
        }
2889
      else
2890
        emit_move_insn (dest, src);
2891
    }
2892
 
2893
  if (to_conversion)
2894
    {
2895
      all->conversion_insns = get_insns ();
2896
      end_sequence ();
2897
    }
2898
 
2899
  SET_DECL_RTL (parm, data->stack_parm);
2900
}
2901
 
2902
/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
2903
   undo the frobbing that we did in assign_parms_augmented_arg_list.  */
2904
 
2905
static void
2906
assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2907
{
2908
  tree parm;
2909
  tree orig_fnargs = all->orig_fnargs;
2910
 
2911
  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2912
    {
2913
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2914
          && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2915
        {
2916
          rtx tmp, real, imag;
2917
          enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2918
 
2919
          real = DECL_RTL (fnargs);
2920
          imag = DECL_RTL (TREE_CHAIN (fnargs));
2921
          if (inner != GET_MODE (real))
2922
            {
2923
              real = gen_lowpart_SUBREG (inner, real);
2924
              imag = gen_lowpart_SUBREG (inner, imag);
2925
            }
2926
 
2927
          if (TREE_ADDRESSABLE (parm))
2928
            {
2929
              rtx rmem, imem;
2930
              HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2931
 
2932
              /* split_complex_arg put the real and imag parts in
2933
                 pseudos.  Move them to memory.  */
2934
              tmp = assign_stack_local (DECL_MODE (parm), size,
2935
                                        TYPE_ALIGN (TREE_TYPE (parm)));
2936
              set_mem_attributes (tmp, parm, 1);
2937
              rmem = adjust_address_nv (tmp, inner, 0);
2938
              imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2939
              push_to_sequence (all->conversion_insns);
2940
              emit_move_insn (rmem, real);
2941
              emit_move_insn (imem, imag);
2942
              all->conversion_insns = get_insns ();
2943
              end_sequence ();
2944
            }
2945
          else
2946
            tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2947
          SET_DECL_RTL (parm, tmp);
2948
 
2949
          real = DECL_INCOMING_RTL (fnargs);
2950
          imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2951
          if (inner != GET_MODE (real))
2952
            {
2953
              real = gen_lowpart_SUBREG (inner, real);
2954
              imag = gen_lowpart_SUBREG (inner, imag);
2955
            }
2956
          tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2957
          set_decl_incoming_rtl (parm, tmp);
2958
          fnargs = TREE_CHAIN (fnargs);
2959
        }
2960
      else
2961
        {
2962
          SET_DECL_RTL (parm, DECL_RTL (fnargs));
2963
          set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
2964
 
2965
          /* Set MEM_EXPR to the original decl, i.e. to PARM,
2966
             instead of the copy of decl, i.e. FNARGS.  */
2967
          if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2968
            set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2969
        }
2970
 
2971
      fnargs = TREE_CHAIN (fnargs);
2972
    }
2973
}
2974
 
2975
/* Assign RTL expressions to the function's parameters.  This may involve
2976
   copying them into registers and using those registers as the DECL_RTL.  */
2977
 
2978
static void
2979
assign_parms (tree fndecl)
2980
{
2981
  struct assign_parm_data_all all;
2982
  tree fnargs, parm;
2983
 
2984
  current_function_internal_arg_pointer
2985
    = targetm.calls.internal_arg_pointer ();
2986
 
2987
  assign_parms_initialize_all (&all);
2988
  fnargs = assign_parms_augmented_arg_list (&all);
2989
 
2990
  for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2991
    {
2992
      struct assign_parm_data_one data;
2993
 
2994
      /* Extract the type of PARM; adjust it according to ABI.  */
2995
      assign_parm_find_data_types (&all, parm, &data);
2996
 
2997
      /* Early out for errors and void parameters.  */
2998
      if (data.passed_mode == VOIDmode)
2999
        {
3000
          SET_DECL_RTL (parm, const0_rtx);
3001
          DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3002
          continue;
3003
        }
3004
 
3005
      if (current_function_stdarg && !TREE_CHAIN (parm))
3006
        assign_parms_setup_varargs (&all, &data, false);
3007
 
3008
      /* Find out where the parameter arrives in this function.  */
3009
      assign_parm_find_entry_rtl (&all, &data);
3010
 
3011
      /* Find out where stack space for this parameter might be.  */
3012
      if (assign_parm_is_stack_parm (&all, &data))
3013
        {
3014
          assign_parm_find_stack_rtl (parm, &data);
3015
          assign_parm_adjust_entry_rtl (&data);
3016
        }
3017
 
3018
      /* Record permanently how this parm was passed.  */
3019
      set_decl_incoming_rtl (parm, data.entry_parm);
3020
 
3021
      /* Update info on where next arg arrives in registers.  */
3022
      FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3023
                            data.passed_type, data.named_arg);
3024
 
3025
      assign_parm_adjust_stack_rtl (&data);
3026
 
3027
      if (assign_parm_setup_block_p (&data))
3028
        assign_parm_setup_block (&all, parm, &data);
3029
      else if (data.passed_pointer || use_register_for_decl (parm))
3030
        assign_parm_setup_reg (&all, parm, &data);
3031
      else
3032
        assign_parm_setup_stack (&all, parm, &data);
3033
    }
3034
 
3035
  if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3036
    assign_parms_unsplit_complex (&all, fnargs);
3037
 
3038
  /* Output all parameter conversion instructions (possibly including calls)
3039
     now that all parameters have been copied out of hard registers.  */
3040
  emit_insn (all.conversion_insns);
3041
 
3042
  /* If we are receiving a struct value address as the first argument, set up
3043
     the RTL for the function result. As this might require code to convert
3044
     the transmitted address to Pmode, we do this here to ensure that possible
3045
     preliminary conversions of the address have been emitted already.  */
3046
  if (all.function_result_decl)
3047
    {
3048
      tree result = DECL_RESULT (current_function_decl);
3049
      rtx addr = DECL_RTL (all.function_result_decl);
3050
      rtx x;
3051
 
3052
      if (DECL_BY_REFERENCE (result))
3053
        x = addr;
3054
      else
3055
        {
3056
          addr = convert_memory_address (Pmode, addr);
3057
          x = gen_rtx_MEM (DECL_MODE (result), addr);
3058
          set_mem_attributes (x, result, 1);
3059
        }
3060
      SET_DECL_RTL (result, x);
3061
    }
3062
 
3063
  /* We have aligned all the args, so add space for the pretend args.  */
3064
  current_function_pretend_args_size = all.pretend_args_size;
3065
  all.stack_args_size.constant += all.extra_pretend_bytes;
3066
  current_function_args_size = all.stack_args_size.constant;
3067
 
3068
  /* Adjust function incoming argument size for alignment and
3069
     minimum length.  */
3070
 
3071
#ifdef REG_PARM_STACK_SPACE
3072
  current_function_args_size = MAX (current_function_args_size,
3073
                                    REG_PARM_STACK_SPACE (fndecl));
3074
#endif
3075
 
3076
  current_function_args_size = CEIL_ROUND (current_function_args_size,
3077
                                           PARM_BOUNDARY / BITS_PER_UNIT);
3078
 
3079
#ifdef ARGS_GROW_DOWNWARD
3080
  current_function_arg_offset_rtx
3081
    = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3082
       : expand_expr (size_diffop (all.stack_args_size.var,
3083
                                   size_int (-all.stack_args_size.constant)),
3084
                      NULL_RTX, VOIDmode, 0));
3085
#else
3086
  current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3087
#endif
3088
 
3089
  /* See how many bytes, if any, of its args a function should try to pop
3090
     on return.  */
3091
 
3092
  current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3093
                                                 current_function_args_size);
3094
 
3095
  /* For stdarg.h function, save info about
3096
     regs and stack space used by the named args.  */
3097
 
3098
  current_function_args_info = all.args_so_far;
3099
 
3100
  /* Set the rtx used for the function return value.  Put this in its
3101
     own variable so any optimizers that need this information don't have
3102
     to include tree.h.  Do this here so it gets done when an inlined
3103
     function gets output.  */
3104
 
3105
  current_function_return_rtx
3106
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3107
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3108
 
3109
  /* If scalar return value was computed in a pseudo-reg, or was a named
3110
     return value that got dumped to the stack, copy that to the hard
3111
     return register.  */
3112
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3113
    {
3114
      tree decl_result = DECL_RESULT (fndecl);
3115
      rtx decl_rtl = DECL_RTL (decl_result);
3116
 
3117
      if (REG_P (decl_rtl)
3118
          ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3119
          : DECL_REGISTER (decl_result))
3120
        {
3121
          rtx real_decl_rtl;
3122
 
3123
          real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3124
                                                        fndecl, true);
3125
          REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3126
          /* The delay slot scheduler assumes that current_function_return_rtx
3127
             holds the hard register containing the return value, not a
3128
             temporary pseudo.  */
3129
          current_function_return_rtx = real_decl_rtl;
3130
        }
3131
    }
3132
}
3133
 
3134
/* A subroutine of gimplify_parameters, invoked via walk_tree.
3135
   For all seen types, gimplify their sizes.  */
3136
 
3137
static tree
3138
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3139
{
3140
  tree t = *tp;
3141
 
3142
  *walk_subtrees = 0;
3143
  if (TYPE_P (t))
3144
    {
3145
      if (POINTER_TYPE_P (t))
3146
        *walk_subtrees = 1;
3147
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3148
               && !TYPE_SIZES_GIMPLIFIED (t))
3149
        {
3150
          gimplify_type_sizes (t, (tree *) data);
3151
          *walk_subtrees = 1;
3152
        }
3153
    }
3154
 
3155
  return NULL;
3156
}
3157
 
3158
/* Gimplify the parameter list for current_function_decl.  This involves
3159
   evaluating SAVE_EXPRs of variable sized parameters and generating code
3160
   to implement callee-copies reference parameters.  Returns a list of
3161
   statements to add to the beginning of the function, or NULL if nothing
3162
   to do.  */
3163
 
3164
tree
3165
gimplify_parameters (void)
3166
{
3167
  struct assign_parm_data_all all;
3168
  tree fnargs, parm, stmts = NULL;
3169
 
3170
  assign_parms_initialize_all (&all);
3171
  fnargs = assign_parms_augmented_arg_list (&all);
3172
 
3173
  for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3174
    {
3175
      struct assign_parm_data_one data;
3176
 
3177
      /* Extract the type of PARM; adjust it according to ABI.  */
3178
      assign_parm_find_data_types (&all, parm, &data);
3179
 
3180
      /* Early out for errors and void parameters.  */
3181
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3182
        continue;
3183
 
3184
      /* Update info on where next arg arrives in registers.  */
3185
      FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3186
                            data.passed_type, data.named_arg);
3187
 
3188
      /* ??? Once upon a time variable_size stuffed parameter list
3189
         SAVE_EXPRs (amongst others) onto a pending sizes list.  This
3190
         turned out to be less than manageable in the gimple world.
3191
         Now we have to hunt them down ourselves.  */
3192
      walk_tree_without_duplicates (&data.passed_type,
3193
                                    gimplify_parm_type, &stmts);
3194
 
3195
      if (!TREE_CONSTANT (DECL_SIZE (parm)))
3196
        {
3197
          gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3198
          gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3199
        }
3200
 
3201
      if (data.passed_pointer)
3202
        {
3203
          tree type = TREE_TYPE (data.passed_type);
3204
          if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3205
                                       type, data.named_arg))
3206
            {
3207
              tree local, t;
3208
 
3209
              /* For constant sized objects, this is trivial; for
3210
                 variable-sized objects, we have to play games.  */
3211
              if (TREE_CONSTANT (DECL_SIZE (parm)))
3212
                {
3213
                  local = create_tmp_var (type, get_name (parm));
3214
                  DECL_IGNORED_P (local) = 0;
3215
                }
3216
              else
3217
                {
3218
                  tree ptr_type, addr, args;
3219
 
3220
                  ptr_type = build_pointer_type (type);
3221
                  addr = create_tmp_var (ptr_type, get_name (parm));
3222
                  DECL_IGNORED_P (addr) = 0;
3223
                  local = build_fold_indirect_ref (addr);
3224
 
3225
                  args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
3226
                  t = built_in_decls[BUILT_IN_ALLOCA];
3227
                  t = build_function_call_expr (t, args);
3228
                  t = fold_convert (ptr_type, t);
3229
                  t = build2 (MODIFY_EXPR, void_type_node, addr, t);
3230
                  gimplify_and_add (t, &stmts);
3231
                }
3232
 
3233
              t = build2 (MODIFY_EXPR, void_type_node, local, parm);
3234
              gimplify_and_add (t, &stmts);
3235
 
3236
              SET_DECL_VALUE_EXPR (parm, local);
3237
              DECL_HAS_VALUE_EXPR_P (parm) = 1;
3238
            }
3239
        }
3240
    }
3241
 
3242
  return stmts;
3243
}
3244
 
3245
/* Indicate whether REGNO is an incoming argument to the current function
3246
   that was promoted to a wider mode.  If so, return the RTX for the
3247
   register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
3248
   that REGNO is promoted from and whether the promotion was signed or
3249
   unsigned.  */
3250
 
3251
rtx
3252
promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
3253
{
3254
  tree arg;
3255
 
3256
  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3257
       arg = TREE_CHAIN (arg))
3258
    if (REG_P (DECL_INCOMING_RTL (arg))
3259
        && REGNO (DECL_INCOMING_RTL (arg)) == regno
3260
        && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3261
      {
3262
        enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3263
        int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
3264
 
3265
        mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3266
        if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3267
            && mode != DECL_MODE (arg))
3268
          {
3269
            *pmode = DECL_MODE (arg);
3270
            *punsignedp = unsignedp;
3271
            return DECL_INCOMING_RTL (arg);
3272
          }
3273
      }
3274
 
3275
  return 0;
3276
}
3277
 
3278
 
3279
/* Compute the size and offset from the start of the stacked arguments for a
3280
   parm passed in mode PASSED_MODE and with type TYPE.
3281
 
3282
   INITIAL_OFFSET_PTR points to the current offset into the stacked
3283
   arguments.
3284
 
3285
   The starting offset and size for this parm are returned in
3286
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
3287
   nonzero, the offset is that of stack slot, which is returned in
3288
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
3289
   padding required from the initial offset ptr to the stack slot.
3290
 
3291
   IN_REGS is nonzero if the argument will be passed in registers.  It will
3292
   never be set if REG_PARM_STACK_SPACE is not defined.
3293
 
3294
   FNDECL is the function in which the argument was defined.
3295
 
3296
   There are two types of rounding that are done.  The first, controlled by
3297
   FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3298
   list to be aligned to the specific boundary (in bits).  This rounding
3299
   affects the initial and starting offsets, but not the argument size.
3300
 
3301
   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3302
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
3303
   initial offset is not affected by this rounding, while the size always
3304
   is and the starting offset may be.  */
3305
 
3306
/*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3307
    INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3308
    callers pass in the total size of args so far as
3309
    INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
3310
 
3311
void
3312
locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3313
                     int partial, tree fndecl ATTRIBUTE_UNUSED,
3314
                     struct args_size *initial_offset_ptr,
3315
                     struct locate_and_pad_arg_data *locate)
3316
{
3317
  tree sizetree;
3318
  enum direction where_pad;
3319
  unsigned int boundary;
3320
  int reg_parm_stack_space = 0;
3321
  int part_size_in_regs;
3322
 
3323
#ifdef REG_PARM_STACK_SPACE
3324
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3325
 
3326
  /* If we have found a stack parm before we reach the end of the
3327
     area reserved for registers, skip that area.  */
3328
  if (! in_regs)
3329
    {
3330
      if (reg_parm_stack_space > 0)
3331
        {
3332
          if (initial_offset_ptr->var)
3333
            {
3334
              initial_offset_ptr->var
3335
                = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3336
                              ssize_int (reg_parm_stack_space));
3337
              initial_offset_ptr->constant = 0;
3338
            }
3339
          else if (initial_offset_ptr->constant < reg_parm_stack_space)
3340
            initial_offset_ptr->constant = reg_parm_stack_space;
3341
        }
3342
    }
3343
#endif /* REG_PARM_STACK_SPACE */
3344
 
3345
  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3346
 
3347
  sizetree
3348
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3349
  where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3350
  boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3351
  locate->where_pad = where_pad;
3352
  locate->boundary = boundary;
3353
 
3354
  /* Remember if the outgoing parameter requires extra alignment on the
3355
     calling function side.  */
3356
  if (boundary > PREFERRED_STACK_BOUNDARY)
3357
    boundary = PREFERRED_STACK_BOUNDARY;
3358
  if (cfun->stack_alignment_needed < boundary)
3359
    cfun->stack_alignment_needed = boundary;
3360
 
3361
#ifdef ARGS_GROW_DOWNWARD
3362
  locate->slot_offset.constant = -initial_offset_ptr->constant;
3363
  if (initial_offset_ptr->var)
3364
    locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3365
                                          initial_offset_ptr->var);
3366
 
3367
  {
3368
    tree s2 = sizetree;
3369
    if (where_pad != none
3370
        && (!host_integerp (sizetree, 1)
3371
            || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3372
      s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3373
    SUB_PARM_SIZE (locate->slot_offset, s2);
3374
  }
3375
 
3376
  locate->slot_offset.constant += part_size_in_regs;
3377
 
3378
  if (!in_regs
3379
#ifdef REG_PARM_STACK_SPACE
3380
      || REG_PARM_STACK_SPACE (fndecl) > 0
3381
#endif
3382
     )
3383
    pad_to_arg_alignment (&locate->slot_offset, boundary,
3384
                          &locate->alignment_pad);
3385
 
3386
  locate->size.constant = (-initial_offset_ptr->constant
3387
                           - locate->slot_offset.constant);
3388
  if (initial_offset_ptr->var)
3389
    locate->size.var = size_binop (MINUS_EXPR,
3390
                                   size_binop (MINUS_EXPR,
3391
                                               ssize_int (0),
3392
                                               initial_offset_ptr->var),
3393
                                   locate->slot_offset.var);
3394
 
3395
  /* Pad_below needs the pre-rounded size to know how much to pad
3396
     below.  */
3397
  locate->offset = locate->slot_offset;
3398
  if (where_pad == downward)
3399
    pad_below (&locate->offset, passed_mode, sizetree);
3400
 
3401
#else /* !ARGS_GROW_DOWNWARD */
3402
  if (!in_regs
3403
#ifdef REG_PARM_STACK_SPACE
3404
      || REG_PARM_STACK_SPACE (fndecl) > 0
3405
#endif
3406
      )
3407
    pad_to_arg_alignment (initial_offset_ptr, boundary,
3408
                          &locate->alignment_pad);
3409
  locate->slot_offset = *initial_offset_ptr;
3410
 
3411
#ifdef PUSH_ROUNDING
3412
  if (passed_mode != BLKmode)
3413
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3414
#endif
3415
 
3416
  /* Pad_below needs the pre-rounded size to know how much to pad below
3417
     so this must be done before rounding up.  */
3418
  locate->offset = locate->slot_offset;
3419
  if (where_pad == downward)
3420
    pad_below (&locate->offset, passed_mode, sizetree);
3421
 
3422
  if (where_pad != none
3423
      && (!host_integerp (sizetree, 1)
3424
          || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3425
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3426
 
3427
  ADD_PARM_SIZE (locate->size, sizetree);
3428
 
3429
  locate->size.constant -= part_size_in_regs;
3430
#endif /* ARGS_GROW_DOWNWARD */
3431
}
3432
 
3433
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3434
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
3435
 
3436
static void
3437
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3438
                      struct args_size *alignment_pad)
3439
{
3440
  tree save_var = NULL_TREE;
3441
  HOST_WIDE_INT save_constant = 0;
3442
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
3443
  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3444
 
3445
#ifdef SPARC_STACK_BOUNDARY_HACK
3446
  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3447
     the real alignment of %sp.  However, when it does this, the
3448
     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
3449
  if (SPARC_STACK_BOUNDARY_HACK)
3450
    sp_offset = 0;
3451
#endif
3452
 
3453
  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3454
    {
3455
      save_var = offset_ptr->var;
3456
      save_constant = offset_ptr->constant;
3457
    }
3458
 
3459
  alignment_pad->var = NULL_TREE;
3460
  alignment_pad->constant = 0;
3461
 
3462
  if (boundary > BITS_PER_UNIT)
3463
    {
3464
      if (offset_ptr->var)
3465
        {
3466
          tree sp_offset_tree = ssize_int (sp_offset);
3467
          tree offset = size_binop (PLUS_EXPR,
3468
                                    ARGS_SIZE_TREE (*offset_ptr),
3469
                                    sp_offset_tree);
3470
#ifdef ARGS_GROW_DOWNWARD
3471
          tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3472
#else
3473
          tree rounded = round_up   (offset, boundary / BITS_PER_UNIT);
3474
#endif
3475
 
3476
          offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3477
          /* ARGS_SIZE_TREE includes constant term.  */
3478
          offset_ptr->constant = 0;
3479
          if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3480
            alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3481
                                             save_var);
3482
        }
3483
      else
3484
        {
3485
          offset_ptr->constant = -sp_offset +
3486
#ifdef ARGS_GROW_DOWNWARD
3487
            FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3488
#else
3489
            CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3490
#endif
3491
            if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3492
              alignment_pad->constant = offset_ptr->constant - save_constant;
3493
        }
3494
    }
3495
}
3496
 
3497
static void
3498
pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3499
{
3500
  if (passed_mode != BLKmode)
3501
    {
3502
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3503
        offset_ptr->constant
3504
          += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3505
               / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3506
              - GET_MODE_SIZE (passed_mode));
3507
    }
3508
  else
3509
    {
3510
      if (TREE_CODE (sizetree) != INTEGER_CST
3511
          || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3512
        {
3513
          /* Round the size up to multiple of PARM_BOUNDARY bits.  */
3514
          tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3515
          /* Add it in.  */
3516
          ADD_PARM_SIZE (*offset_ptr, s2);
3517
          SUB_PARM_SIZE (*offset_ptr, sizetree);
3518
        }
3519
    }
3520
}
3521
 
3522
/* Walk the tree of blocks describing the binding levels within a function
3523
   and warn about variables the might be killed by setjmp or vfork.
3524
   This is done after calling flow_analysis and before global_alloc
3525
   clobbers the pseudo-regs to hard regs.  */
3526
 
3527
void
3528
setjmp_vars_warning (tree block)
3529
{
3530
  tree decl, sub;
3531
 
3532
  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3533
    {
3534
      if (TREE_CODE (decl) == VAR_DECL
3535
          && DECL_RTL_SET_P (decl)
3536
          && REG_P (DECL_RTL (decl))
3537
          && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3538
        warning (0, "variable %q+D might be clobbered by %<longjmp%>"
3539
                 " or %<vfork%>",
3540
                 decl);
3541
    }
3542
 
3543
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3544
    setjmp_vars_warning (sub);
3545
}
3546
 
3547
/* Do the appropriate part of setjmp_vars_warning
3548
   but for arguments instead of local variables.  */
3549
 
3550
void
3551
setjmp_args_warning (void)
3552
{
3553
  tree decl;
3554
  for (decl = DECL_ARGUMENTS (current_function_decl);
3555
       decl; decl = TREE_CHAIN (decl))
3556
    if (DECL_RTL (decl) != 0
3557
        && REG_P (DECL_RTL (decl))
3558
        && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3559
      warning (0, "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3560
               decl);
3561
}
3562
 
3563
 
3564
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3565
   and create duplicate blocks.  */
3566
/* ??? Need an option to either create block fragments or to create
3567
   abstract origin duplicates of a source block.  It really depends
3568
   on what optimization has been performed.  */
3569
 
3570
void
3571
reorder_blocks (void)
3572
{
3573
  tree block = DECL_INITIAL (current_function_decl);
3574
  VEC(tree,heap) *block_stack;
3575
 
3576
  if (block == NULL_TREE)
3577
    return;
3578
 
3579
  block_stack = VEC_alloc (tree, heap, 10);
3580
 
3581
  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
3582
  clear_block_marks (block);
3583
 
3584
  /* Prune the old trees away, so that they don't get in the way.  */
3585
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
3586
  BLOCK_CHAIN (block) = NULL_TREE;
3587
 
3588
  /* Recreate the block tree from the note nesting.  */
3589
  reorder_blocks_1 (get_insns (), block, &block_stack);
3590
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3591
 
3592
  VEC_free (tree, heap, block_stack);
3593
}
3594
 
3595
/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */
3596
 
3597
void
3598
clear_block_marks (tree block)
3599
{
3600
  while (block)
3601
    {
3602
      TREE_ASM_WRITTEN (block) = 0;
3603
      clear_block_marks (BLOCK_SUBBLOCKS (block));
3604
      block = BLOCK_CHAIN (block);
3605
    }
3606
}
3607
 
3608
static void
3609
reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3610
{
3611
  rtx insn;
3612
 
3613
  for (insn = insns; insn; insn = NEXT_INSN (insn))
3614
    {
3615
      if (NOTE_P (insn))
3616
        {
3617
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3618
            {
3619
              tree block = NOTE_BLOCK (insn);
3620
              tree origin;
3621
 
3622
              origin = (BLOCK_FRAGMENT_ORIGIN (block)
3623
                        ? BLOCK_FRAGMENT_ORIGIN (block)
3624
                        : block);
3625
 
3626
              /* If we have seen this block before, that means it now
3627
                 spans multiple address regions.  Create a new fragment.  */
3628
              if (TREE_ASM_WRITTEN (block))
3629
                {
3630
                  tree new_block = copy_node (block);
3631
 
3632
                  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3633
                  BLOCK_FRAGMENT_CHAIN (new_block)
3634
                    = BLOCK_FRAGMENT_CHAIN (origin);
3635
                  BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3636
 
3637
                  NOTE_BLOCK (insn) = new_block;
3638
                  block = new_block;
3639
                }
3640
 
3641
              BLOCK_SUBBLOCKS (block) = 0;
3642
              TREE_ASM_WRITTEN (block) = 1;
3643
              /* When there's only one block for the entire function,
3644
                 current_block == block and we mustn't do this, it
3645
                 will cause infinite recursion.  */
3646
              if (block != current_block)
3647
                {
3648
                  if (block != origin)
3649
                    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
3650
 
3651
                  BLOCK_SUPERCONTEXT (block) = current_block;
3652
                  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3653
                  BLOCK_SUBBLOCKS (current_block) = block;
3654
                  current_block = origin;
3655
                }
3656
              VEC_safe_push (tree, heap, *p_block_stack, block);
3657
            }
3658
          else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3659
            {
3660
              NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3661
              BLOCK_SUBBLOCKS (current_block)
3662
                = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3663
              current_block = BLOCK_SUPERCONTEXT (current_block);
3664
            }
3665
        }
3666
    }
3667
}
3668
 
3669
/* Reverse the order of elements in the chain T of blocks,
3670
   and return the new head of the chain (old last element).  */
3671
 
3672
tree
3673
blocks_nreverse (tree t)
3674
{
3675
  tree prev = 0, decl, next;
3676
  for (decl = t; decl; decl = next)
3677
    {
3678
      next = BLOCK_CHAIN (decl);
3679
      BLOCK_CHAIN (decl) = prev;
3680
      prev = decl;
3681
    }
3682
  return prev;
3683
}
3684
 
3685
/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
3686
   non-NULL, list them all into VECTOR, in a depth-first preorder
3687
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
3688
   blocks.  */
3689
 
3690
static int
3691
all_blocks (tree block, tree *vector)
3692
{
3693
  int n_blocks = 0;
3694
 
3695
  while (block)
3696
    {
3697
      TREE_ASM_WRITTEN (block) = 0;
3698
 
3699
      /* Record this block.  */
3700
      if (vector)
3701
        vector[n_blocks] = block;
3702
 
3703
      ++n_blocks;
3704
 
3705
      /* Record the subblocks, and their subblocks...  */
3706
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3707
                              vector ? vector + n_blocks : 0);
3708
      block = BLOCK_CHAIN (block);
3709
    }
3710
 
3711
  return n_blocks;
3712
}
3713
 
3714
/* Return a vector containing all the blocks rooted at BLOCK.  The
3715
   number of elements in the vector is stored in N_BLOCKS_P.  The
3716
   vector is dynamically allocated; it is the caller's responsibility
3717
   to call `free' on the pointer returned.  */
3718
 
3719
static tree *
3720
get_block_vector (tree block, int *n_blocks_p)
3721
{
3722
  tree *block_vector;
3723
 
3724
  *n_blocks_p = all_blocks (block, NULL);
3725
  block_vector = XNEWVEC (tree, *n_blocks_p);
3726
  all_blocks (block, block_vector);
3727
 
3728
  return block_vector;
3729
}
3730
 
3731
static GTY(()) int next_block_index = 2;
3732
 
3733
/* Set BLOCK_NUMBER for all the blocks in FN.  */
3734
 
3735
void
3736
number_blocks (tree fn)
3737
{
3738
  int i;
3739
  int n_blocks;
3740
  tree *block_vector;
3741
 
3742
  /* For SDB and XCOFF debugging output, we start numbering the blocks
3743
     from 1 within each function, rather than keeping a running
3744
     count.  */
3745
#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3746
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3747
    next_block_index = 1;
3748
#endif
3749
 
3750
  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3751
 
3752
  /* The top-level BLOCK isn't numbered at all.  */
3753
  for (i = 1; i < n_blocks; ++i)
3754
    /* We number the blocks from two.  */
3755
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3756
 
3757
  free (block_vector);
3758
 
3759
  return;
3760
}
3761
 
3762
/* If VAR is present in a subblock of BLOCK, return the subblock.  */
3763
 
3764
tree
3765
debug_find_var_in_block_tree (tree var, tree block)
3766
{
3767
  tree t;
3768
 
3769
  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3770
    if (t == var)
3771
      return block;
3772
 
3773
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3774
    {
3775
      tree ret = debug_find_var_in_block_tree (var, t);
3776
      if (ret)
3777
        return ret;
3778
    }
3779
 
3780
  return NULL_TREE;
3781
}
3782
 
3783
/* Allocate a function structure for FNDECL and set its contents
3784
   to the defaults.  */
3785
 
3786
void
3787
allocate_struct_function (tree fndecl)
3788
{
3789
  tree result;
3790
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3791
 
3792
  cfun = ggc_alloc_cleared (sizeof (struct function));
3793
 
3794
  cfun->stack_alignment_needed = STACK_BOUNDARY;
3795
  cfun->preferred_stack_boundary = STACK_BOUNDARY;
3796
 
3797
  current_function_funcdef_no = funcdef_no++;
3798
 
3799
  cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3800
 
3801
  init_eh_for_function ();
3802
 
3803
  lang_hooks.function.init (cfun);
3804
  if (init_machine_status)
3805
    cfun->machine = (*init_machine_status) ();
3806
 
3807
  if (fndecl == NULL)
3808
    return;
3809
 
3810
  DECL_STRUCT_FUNCTION (fndecl) = cfun;
3811
  cfun->decl = fndecl;
3812
 
3813
  result = DECL_RESULT (fndecl);
3814
  if (aggregate_value_p (result, fndecl))
3815
    {
3816
#ifdef PCC_STATIC_STRUCT_RETURN
3817
      current_function_returns_pcc_struct = 1;
3818
#endif
3819
      current_function_returns_struct = 1;
3820
    }
3821
 
3822
  current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
3823
 
3824
  current_function_stdarg
3825
    = (fntype
3826
       && TYPE_ARG_TYPES (fntype) != 0
3827
       && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3828
           != void_type_node));
3829
 
3830
  /* Assume all registers in stdarg functions need to be saved.  */
3831
  cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3832
  cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3833
}
3834
 
3835
/* Reset cfun, and other non-struct-function variables to defaults as
3836
   appropriate for emitting rtl at the start of a function.  */
3837
 
3838
static void
3839
prepare_function_start (tree fndecl)
3840
{
3841
  if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3842
    cfun = DECL_STRUCT_FUNCTION (fndecl);
3843
  else
3844
    allocate_struct_function (fndecl);
3845
  init_emit ();
3846
  init_varasm_status (cfun);
3847
  init_expr ();
3848
 
3849
  cse_not_expected = ! optimize;
3850
 
3851
  /* Caller save not needed yet.  */
3852
  caller_save_needed = 0;
3853
 
3854
  /* We haven't done register allocation yet.  */
3855
  reg_renumber = 0;
3856
 
3857
  /* Indicate that we have not instantiated virtual registers yet.  */
3858
  virtuals_instantiated = 0;
3859
 
3860
  /* Indicate that we want CONCATs now.  */
3861
  generating_concat_p = 1;
3862
 
3863
  /* Indicate we have no need of a frame pointer yet.  */
3864
  frame_pointer_needed = 0;
3865
}
3866
 
3867
/* Initialize the rtl expansion mechanism so that we can do simple things
3868
   like generate sequences.  This is used to provide a context during global
3869
   initialization of some passes.  */
3870
void
3871
init_dummy_function_start (void)
3872
{
3873
  prepare_function_start (NULL);
3874
}
3875
 
3876
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3877
   and initialize static variables for generating RTL for the statements
3878
   of the function.  */
3879
 
3880
void
3881
init_function_start (tree subr)
3882
{
3883
  prepare_function_start (subr);
3884
 
3885
  /* Prevent ever trying to delete the first instruction of a
3886
     function.  Also tell final how to output a linenum before the
3887
     function prologue.  Note linenums could be missing, e.g. when
3888
     compiling a Java .class file.  */
3889
  if (! DECL_IS_BUILTIN (subr))
3890
    emit_line_note (DECL_SOURCE_LOCATION (subr));
3891
 
3892
  /* Make sure first insn is a note even if we don't want linenums.
3893
     This makes sure the first insn will never be deleted.
3894
     Also, final expects a note to appear there.  */
3895
  emit_note (NOTE_INSN_DELETED);
3896
 
3897
  /* Warn if this value is an aggregate type,
3898
     regardless of which calling convention we are using for it.  */
3899
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3900
    warning (OPT_Waggregate_return, "function returns an aggregate");
3901
}
3902
 
3903
/* Make sure all values used by the optimization passes have sane
3904
   defaults.  */
3905
unsigned int
3906
init_function_for_compilation (void)
3907
{
3908
  reg_renumber = 0;
3909
 
3910
  /* No prologue/epilogue insns yet.  Make sure that these vectors are
3911
     empty.  */
3912
  gcc_assert (VEC_length (int, prologue) == 0);
3913
  gcc_assert (VEC_length (int, epilogue) == 0);
3914
  gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
3915
  return 0;
3916
}
3917
 
3918
struct tree_opt_pass pass_init_function =
3919
{
3920
  NULL,                                 /* name */
3921
  NULL,                                 /* gate */
3922
  init_function_for_compilation,        /* execute */
3923
  NULL,                                 /* sub */
3924
  NULL,                                 /* next */
3925
  0,                                    /* static_pass_number */
3926
  0,                                    /* tv_id */
3927
  0,                                    /* properties_required */
3928
  0,                                    /* properties_provided */
3929
  0,                                    /* properties_destroyed */
3930
  0,                                    /* todo_flags_start */
3931
  0,                                    /* todo_flags_finish */
3932
 
3933
};
3934
 
3935
 
3936
void
3937
expand_main_function (void)
3938
{
3939
#if (defined(INVOKE__main)                              \
3940
     || (!defined(HAS_INIT_SECTION)                     \
3941
         && !defined(INIT_SECTION_ASM_OP)               \
3942
         && !defined(INIT_ARRAY_SECTION_ASM_OP)))
3943
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
3944
#endif
3945
}
3946
 
3947
/* Expand code to initialize the stack_protect_guard.  This is invoked at
3948
   the beginning of a function to be protected.  */
3949
 
3950
#ifndef HAVE_stack_protect_set
3951
# define HAVE_stack_protect_set         0
3952
# define gen_stack_protect_set(x,y)     (gcc_unreachable (), NULL_RTX)
3953
#endif
3954
 
3955
void
3956
stack_protect_prologue (void)
3957
{
3958
  tree guard_decl = targetm.stack_protect_guard ();
3959
  rtx x, y;
3960
 
3961
  /* Avoid expand_expr here, because we don't want guard_decl pulled
3962
     into registers unless absolutely necessary.  And we know that
3963
     cfun->stack_protect_guard is a local stack slot, so this skips
3964
     all the fluff.  */
3965
  x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
3966
  y = validize_mem (DECL_RTL (guard_decl));
3967
 
3968
  /* Allow the target to copy from Y to X without leaking Y into a
3969
     register.  */
3970
  if (HAVE_stack_protect_set)
3971
    {
3972
      rtx insn = gen_stack_protect_set (x, y);
3973
      if (insn)
3974
        {
3975
          emit_insn (insn);
3976
          return;
3977
        }
3978
    }
3979
 
3980
  /* Otherwise do a straight move.  */
3981
  emit_move_insn (x, y);
3982
}
3983
 
3984
/* Expand code to verify the stack_protect_guard.  This is invoked at
3985
   the end of a function to be protected.  */
3986
 
3987
#ifndef HAVE_stack_protect_test
3988
# define HAVE_stack_protect_test                0
3989
# define gen_stack_protect_test(x, y, z)        (gcc_unreachable (), NULL_RTX)
3990
#endif
3991
 
3992
void
3993
stack_protect_epilogue (void)
3994
{
3995
  tree guard_decl = targetm.stack_protect_guard ();
3996
  rtx label = gen_label_rtx ();
3997
  rtx x, y, tmp;
3998
 
3999
  /* Avoid expand_expr here, because we don't want guard_decl pulled
4000
     into registers unless absolutely necessary.  And we know that
4001
     cfun->stack_protect_guard is a local stack slot, so this skips
4002
     all the fluff.  */
4003
  x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4004
  y = validize_mem (DECL_RTL (guard_decl));
4005
 
4006
  /* Allow the target to compare Y with X without leaking either into
4007
     a register.  */
4008
  switch (HAVE_stack_protect_test != 0)
4009
    {
4010
    case 1:
4011
      tmp = gen_stack_protect_test (x, y, label);
4012
      if (tmp)
4013
        {
4014
          emit_insn (tmp);
4015
          break;
4016
        }
4017
      /* FALLTHRU */
4018
 
4019
    default:
4020
      emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4021
      break;
4022
    }
4023
 
4024
  /* The noreturn predictor has been moved to the tree level.  The rtl-level
4025
     predictors estimate this branch about 20%, which isn't enough to get
4026
     things moved out of line.  Since this is the only extant case of adding
4027
     a noreturn function at the rtl level, it doesn't seem worth doing ought
4028
     except adding the prediction by hand.  */
4029
  tmp = get_last_insn ();
4030
  if (JUMP_P (tmp))
4031
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4032
 
4033
  expand_expr_stmt (targetm.stack_protect_fail ());
4034
  emit_label (label);
4035
}
4036
 
4037
/* Start the RTL for a new function, and set variables used for
4038
   emitting RTL.
4039
   SUBR is the FUNCTION_DECL node.
4040
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4041
   the function's parameters, which must be run at any return statement.  */
4042
 
4043
void
4044
expand_function_start (tree subr)
4045
{
4046
  /* Make sure volatile mem refs aren't considered
4047
     valid operands of arithmetic insns.  */
4048
  init_recog_no_volatile ();
4049
 
4050
  current_function_profile
4051
    = (profile_flag
4052
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4053
 
4054
  current_function_limit_stack
4055
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4056
 
4057
  /* Make the label for return statements to jump to.  Do not special
4058
     case machines with special return instructions -- they will be
4059
     handled later during jump, ifcvt, or epilogue creation.  */
4060
  return_label = gen_label_rtx ();
4061
 
4062
  /* Initialize rtx used to return the value.  */
4063
  /* Do this before assign_parms so that we copy the struct value address
4064
     before any library calls that assign parms might generate.  */
4065
 
4066
  /* Decide whether to return the value in memory or in a register.  */
4067
  if (aggregate_value_p (DECL_RESULT (subr), subr))
4068
    {
4069
      /* Returning something that won't go in a register.  */
4070
      rtx value_address = 0;
4071
 
4072
#ifdef PCC_STATIC_STRUCT_RETURN
4073
      if (current_function_returns_pcc_struct)
4074
        {
4075
          int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4076
          value_address = assemble_static_space (size);
4077
        }
4078
      else
4079
#endif
4080
        {
4081
          rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4082
          /* Expect to be passed the address of a place to store the value.
4083
             If it is passed as an argument, assign_parms will take care of
4084
             it.  */
4085
          if (sv)
4086
            {
4087
              value_address = gen_reg_rtx (Pmode);
4088
              emit_move_insn (value_address, sv);
4089
            }
4090
        }
4091
      if (value_address)
4092
        {
4093
          rtx x = value_address;
4094
          if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4095
            {
4096
              x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4097
              set_mem_attributes (x, DECL_RESULT (subr), 1);
4098
            }
4099
          SET_DECL_RTL (DECL_RESULT (subr), x);
4100
        }
4101
    }
4102
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4103
    /* If return mode is void, this decl rtl should not be used.  */
4104
    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4105
  else
4106
    {
4107
      /* Compute the return values into a pseudo reg, which we will copy
4108
         into the true return register after the cleanups are done.  */
4109
      tree return_type = TREE_TYPE (DECL_RESULT (subr));
4110
      if (TYPE_MODE (return_type) != BLKmode
4111
          && targetm.calls.return_in_msb (return_type))
4112
        /* expand_function_end will insert the appropriate padding in
4113
           this case.  Use the return value's natural (unpadded) mode
4114
           within the function proper.  */
4115
        SET_DECL_RTL (DECL_RESULT (subr),
4116
                      gen_reg_rtx (TYPE_MODE (return_type)));
4117
      else
4118
        {
4119
          /* In order to figure out what mode to use for the pseudo, we
4120
             figure out what the mode of the eventual return register will
4121
             actually be, and use that.  */
4122
          rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4123
 
4124
          /* Structures that are returned in registers are not
4125
             aggregate_value_p, so we may see a PARALLEL or a REG.  */
4126
          if (REG_P (hard_reg))
4127
            SET_DECL_RTL (DECL_RESULT (subr),
4128
                          gen_reg_rtx (GET_MODE (hard_reg)));
4129
          else
4130
            {
4131
              gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4132
              SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4133
            }
4134
        }
4135
 
4136
      /* Set DECL_REGISTER flag so that expand_function_end will copy the
4137
         result to the real return register(s).  */
4138
      DECL_REGISTER (DECL_RESULT (subr)) = 1;
4139
    }
4140
 
4141
  /* Initialize rtx for parameters and local variables.
4142
     In some cases this requires emitting insns.  */
4143
  assign_parms (subr);
4144
 
4145
  /* If function gets a static chain arg, store it.  */
4146
  if (cfun->static_chain_decl)
4147
    {
4148
      tree parm = cfun->static_chain_decl;
4149
      rtx local = gen_reg_rtx (Pmode);
4150
 
4151
      set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4152
      SET_DECL_RTL (parm, local);
4153
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4154
 
4155
      emit_move_insn (local, static_chain_incoming_rtx);
4156
    }
4157
 
4158
  /* If the function receives a non-local goto, then store the
4159
     bits we need to restore the frame pointer.  */
4160
  if (cfun->nonlocal_goto_save_area)
4161
    {
4162
      tree t_save;
4163
      rtx r_save;
4164
 
4165
      /* ??? We need to do this save early.  Unfortunately here is
4166
         before the frame variable gets declared.  Help out...  */
4167
      expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4168
 
4169
      t_save = build4 (ARRAY_REF, ptr_type_node,
4170
                       cfun->nonlocal_goto_save_area,
4171
                       integer_zero_node, NULL_TREE, NULL_TREE);
4172
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4173
      r_save = convert_memory_address (Pmode, r_save);
4174
 
4175
      emit_move_insn (r_save, virtual_stack_vars_rtx);
4176
      update_nonlocal_goto_save_area ();
4177
    }
4178
 
4179
  /* The following was moved from init_function_start.
4180
     The move is supposed to make sdb output more accurate.  */
4181
  /* Indicate the beginning of the function body,
4182
     as opposed to parm setup.  */
4183
  emit_note (NOTE_INSN_FUNCTION_BEG);
4184
 
4185
  gcc_assert (NOTE_P (get_last_insn ()));
4186
 
4187
  parm_birth_insn = get_last_insn ();
4188
 
4189
  if (current_function_profile)
4190
    {
4191
#ifdef PROFILE_HOOK
4192
      PROFILE_HOOK (current_function_funcdef_no);
4193
#endif
4194
    }
4195
 
4196
  /* After the display initializations is where the stack checking
4197
     probe should go.  */
4198
  if(flag_stack_check)
4199
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4200
 
4201
  /* Make sure there is a line number after the function entry setup code.  */
4202
  force_next_line_note ();
4203
}
4204
 
4205
/* Undo the effects of init_dummy_function_start.  */
4206
void
4207
expand_dummy_function_end (void)
4208
{
4209
  /* End any sequences that failed to be closed due to syntax errors.  */
4210
  while (in_sequence_p ())
4211
    end_sequence ();
4212
 
4213
  /* Outside function body, can't compute type's actual size
4214
     until next function's body starts.  */
4215
 
4216
  free_after_parsing (cfun);
4217
  free_after_compilation (cfun);
4218
  cfun = 0;
4219
}
4220
 
4221
/* Call DOIT for each hard register used as a return value from
4222
   the current function.  */
4223
 
4224
void
4225
diddle_return_value (void (*doit) (rtx, void *), void *arg)
4226
{
4227
  rtx outgoing = current_function_return_rtx;
4228
 
4229
  if (! outgoing)
4230
    return;
4231
 
4232
  if (REG_P (outgoing))
4233
    (*doit) (outgoing, arg);
4234
  else if (GET_CODE (outgoing) == PARALLEL)
4235
    {
4236
      int i;
4237
 
4238
      for (i = 0; i < XVECLEN (outgoing, 0); i++)
4239
        {
4240
          rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4241
 
4242
          if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4243
            (*doit) (x, arg);
4244
        }
4245
    }
4246
}
4247
 
4248
static void
4249
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4250
{
4251
  emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4252
}
4253
 
4254
void
4255
clobber_return_register (void)
4256
{
4257
  diddle_return_value (do_clobber_return_reg, NULL);
4258
 
4259
  /* In case we do use pseudo to return value, clobber it too.  */
4260
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4261
    {
4262
      tree decl_result = DECL_RESULT (current_function_decl);
4263
      rtx decl_rtl = DECL_RTL (decl_result);
4264
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4265
        {
4266
          do_clobber_return_reg (decl_rtl, NULL);
4267
        }
4268
    }
4269
}
4270
 
4271
static void
4272
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4273
{
4274
  emit_insn (gen_rtx_USE (VOIDmode, reg));
4275
}
4276
 
4277
static void
4278
use_return_register (void)
4279
{
4280
  diddle_return_value (do_use_return_reg, NULL);
4281
}
4282
 
4283
/* Possibly warn about unused parameters.  */
4284
void
4285
do_warn_unused_parameter (tree fn)
4286
{
4287
  tree decl;
4288
 
4289
  for (decl = DECL_ARGUMENTS (fn);
4290
       decl; decl = TREE_CHAIN (decl))
4291
    if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4292
        && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4293
      warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4294
}
4295
 
4296
static GTY(()) rtx initial_trampoline;
4297
 
4298
/* Generate RTL for the end of the current function.  */
4299
 
4300
void
4301
expand_function_end (void)
4302
{
4303
  rtx clobber_after;
4304
 
4305
  /* If arg_pointer_save_area was referenced only from a nested
4306
     function, we will not have initialized it yet.  Do that now.  */
4307
  if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4308
    get_arg_pointer_save_area (cfun);
4309
 
4310
  /* If we are doing stack checking and this function makes calls,
4311
     do a stack probe at the start of the function to ensure we have enough
4312
     space for another stack frame.  */
4313
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4314
    {
4315
      rtx insn, seq;
4316
 
4317
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4318
        if (CALL_P (insn))
4319
          {
4320
            start_sequence ();
4321
            probe_stack_range (STACK_CHECK_PROTECT,
4322
                               GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4323
            seq = get_insns ();
4324
            end_sequence ();
4325
            emit_insn_before (seq, stack_check_probe_note);
4326
            break;
4327
          }
4328
    }
4329
 
4330
  /* Possibly warn about unused parameters.
4331
     When frontend does unit-at-a-time, the warning is already
4332
     issued at finalization time.  */
4333
  if (warn_unused_parameter
4334
      && !lang_hooks.callgraph.expand_function)
4335
    do_warn_unused_parameter (current_function_decl);
4336
 
4337
  /* End any sequences that failed to be closed due to syntax errors.  */
4338
  while (in_sequence_p ())
4339
    end_sequence ();
4340
 
4341
  clear_pending_stack_adjust ();
4342
  do_pending_stack_adjust ();
4343
 
4344
  /* Mark the end of the function body.
4345
     If control reaches this insn, the function can drop through
4346
     without returning a value.  */
4347
  emit_note (NOTE_INSN_FUNCTION_END);
4348
 
4349
  /* Must mark the last line number note in the function, so that the test
4350
     coverage code can avoid counting the last line twice.  This just tells
4351
     the code to ignore the immediately following line note, since there
4352
     already exists a copy of this note somewhere above.  This line number
4353
     note is still needed for debugging though, so we can't delete it.  */
4354
  if (flag_test_coverage)
4355
    emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
4356
 
4357
  /* Output a linenumber for the end of the function.
4358
     SDB depends on this.  */
4359
  force_next_line_note ();
4360
  emit_line_note (input_location);
4361
 
4362
  /* Before the return label (if any), clobber the return
4363
     registers so that they are not propagated live to the rest of
4364
     the function.  This can only happen with functions that drop
4365
     through; if there had been a return statement, there would
4366
     have either been a return rtx, or a jump to the return label.
4367
 
4368
     We delay actual code generation after the current_function_value_rtx
4369
     is computed.  */
4370
  clobber_after = get_last_insn ();
4371
 
4372
  /* Output the label for the actual return from the function.  */
4373
  emit_label (return_label);
4374
 
4375
  if (USING_SJLJ_EXCEPTIONS)
4376
    {
4377
      /* Let except.c know where it should emit the call to unregister
4378
         the function context for sjlj exceptions.  */
4379
      if (flag_exceptions)
4380
        sjlj_emit_function_exit_after (get_last_insn ());
4381
    }
4382
  else
4383
    {
4384
      /* @@@ This is a kludge.  We want to ensure that instructions that
4385
         may trap are not moved into the epilogue by scheduling, because
4386
         we don't always emit unwind information for the epilogue.
4387
         However, not all machine descriptions define a blockage insn, so
4388
         emit an ASM_INPUT to act as one.  */
4389
      if (flag_non_call_exceptions)
4390
        emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4391
    }
4392
 
4393
  /* If this is an implementation of throw, do what's necessary to
4394
     communicate between __builtin_eh_return and the epilogue.  */
4395
  expand_eh_return ();
4396
 
4397
  /* If scalar return value was computed in a pseudo-reg, or was a named
4398
     return value that got dumped to the stack, copy that to the hard
4399
     return register.  */
4400
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4401
    {
4402
      tree decl_result = DECL_RESULT (current_function_decl);
4403
      rtx decl_rtl = DECL_RTL (decl_result);
4404
 
4405
      if (REG_P (decl_rtl)
4406
          ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4407
          : DECL_REGISTER (decl_result))
4408
        {
4409
          rtx real_decl_rtl = current_function_return_rtx;
4410
 
4411
          /* This should be set in assign_parms.  */
4412
          gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4413
 
4414
          /* If this is a BLKmode structure being returned in registers,
4415
             then use the mode computed in expand_return.  Note that if
4416
             decl_rtl is memory, then its mode may have been changed,
4417
             but that current_function_return_rtx has not.  */
4418
          if (GET_MODE (real_decl_rtl) == BLKmode)
4419
            PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4420
 
4421
          /* If a non-BLKmode return value should be padded at the least
4422
             significant end of the register, shift it left by the appropriate
4423
             amount.  BLKmode results are handled using the group load/store
4424
             machinery.  */
4425
          if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4426
              && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4427
            {
4428
              emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4429
                                           REGNO (real_decl_rtl)),
4430
                              decl_rtl);
4431
              shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4432
            }
4433
          /* If a named return value dumped decl_return to memory, then
4434
             we may need to re-do the PROMOTE_MODE signed/unsigned
4435
             extension.  */
4436
          else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4437
            {
4438
              int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4439
 
4440
              if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4441
                promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4442
                              &unsignedp, 1);
4443
 
4444
              convert_move (real_decl_rtl, decl_rtl, unsignedp);
4445
            }
4446
          else if (GET_CODE (real_decl_rtl) == PARALLEL)
4447
            {
4448
              /* If expand_function_start has created a PARALLEL for decl_rtl,
4449
                 move the result to the real return registers.  Otherwise, do
4450
                 a group load from decl_rtl for a named return.  */
4451
              if (GET_CODE (decl_rtl) == PARALLEL)
4452
                emit_group_move (real_decl_rtl, decl_rtl);
4453
              else
4454
                emit_group_load (real_decl_rtl, decl_rtl,
4455
                                 TREE_TYPE (decl_result),
4456
                                 int_size_in_bytes (TREE_TYPE (decl_result)));
4457
            }
4458
          /* In the case of complex integer modes smaller than a word, we'll
4459
             need to generate some non-trivial bitfield insertions.  Do that
4460
             on a pseudo and not the hard register.  */
4461
          else if (GET_CODE (decl_rtl) == CONCAT
4462
                   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4463
                   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4464
            {
4465
              int old_generating_concat_p;
4466
              rtx tmp;
4467
 
4468
              old_generating_concat_p = generating_concat_p;
4469
              generating_concat_p = 0;
4470
              tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4471
              generating_concat_p = old_generating_concat_p;
4472
 
4473
              emit_move_insn (tmp, decl_rtl);
4474
              emit_move_insn (real_decl_rtl, tmp);
4475
            }
4476
          else
4477
            emit_move_insn (real_decl_rtl, decl_rtl);
4478
        }
4479
    }
4480
 
4481
  /* If returning a structure, arrange to return the address of the value
4482
     in a place where debuggers expect to find it.
4483
 
4484
     If returning a structure PCC style,
4485
     the caller also depends on this value.
4486
     And current_function_returns_pcc_struct is not necessarily set.  */
4487
  if (current_function_returns_struct
4488
      || current_function_returns_pcc_struct)
4489
    {
4490
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4491
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4492
      rtx outgoing;
4493
 
4494
      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4495
        type = TREE_TYPE (type);
4496
      else
4497
        value_address = XEXP (value_address, 0);
4498
 
4499
      outgoing = targetm.calls.function_value (build_pointer_type (type),
4500
                                               current_function_decl, true);
4501
 
4502
      /* Mark this as a function return value so integrate will delete the
4503
         assignment and USE below when inlining this function.  */
4504
      REG_FUNCTION_VALUE_P (outgoing) = 1;
4505
 
4506
      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
4507
      value_address = convert_memory_address (GET_MODE (outgoing),
4508
                                              value_address);
4509
 
4510
      emit_move_insn (outgoing, value_address);
4511
 
4512
      /* Show return register used to hold result (in this case the address
4513
         of the result.  */
4514
      current_function_return_rtx = outgoing;
4515
    }
4516
 
4517
  /* Emit the actual code to clobber return register.  */
4518
  {
4519
    rtx seq;
4520
 
4521
    start_sequence ();
4522
    clobber_return_register ();
4523
    expand_naked_return ();
4524
    seq = get_insns ();
4525
    end_sequence ();
4526
 
4527
    emit_insn_after (seq, clobber_after);
4528
  }
4529
 
4530
  /* Output the label for the naked return from the function.  */
4531
  emit_label (naked_return_label);
4532
 
4533
  /* If stack protection is enabled for this function, check the guard.  */
4534
  if (cfun->stack_protect_guard)
4535
    stack_protect_epilogue ();
4536
 
4537
  /* If we had calls to alloca, and this machine needs
4538
     an accurate stack pointer to exit the function,
4539
     insert some code to save and restore the stack pointer.  */
4540
  if (! EXIT_IGNORE_STACK
4541
      && current_function_calls_alloca)
4542
    {
4543
      rtx tem = 0;
4544
 
4545
      emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4546
      emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4547
    }
4548
 
4549
  /* ??? This should no longer be necessary since stupid is no longer with
4550
     us, but there are some parts of the compiler (eg reload_combine, and
4551
     sh mach_dep_reorg) that still try and compute their own lifetime info
4552
     instead of using the general framework.  */
4553
  use_return_register ();
4554
}
4555
 
4556
rtx
4557
get_arg_pointer_save_area (struct function *f)
4558
{
4559
  rtx ret = f->x_arg_pointer_save_area;
4560
 
4561
  if (! ret)
4562
    {
4563
      ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4564
      f->x_arg_pointer_save_area = ret;
4565
    }
4566
 
4567
  if (f == cfun && ! f->arg_pointer_save_area_init)
4568
    {
4569
      rtx seq;
4570
 
4571
      /* Save the arg pointer at the beginning of the function.  The
4572
         generated stack slot may not be a valid memory address, so we
4573
         have to check it and fix it if necessary.  */
4574
      start_sequence ();
4575
      emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4576
      seq = get_insns ();
4577
      end_sequence ();
4578
 
4579
      push_topmost_sequence ();
4580
      emit_insn_after (seq, entry_of_function ());
4581
      pop_topmost_sequence ();
4582
    }
4583
 
4584
  return ret;
4585
}
4586
 
4587
/* Extend a vector that records the INSN_UIDs of INSNS
4588
   (a list of one or more insns).  */
4589
 
4590
static void
4591
record_insns (rtx insns, VEC(int,heap) **vecp)
4592
{
4593
  rtx tmp;
4594
 
4595
  for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4596
    VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4597
}
4598
 
4599
/* Set the locator of the insn chain starting at INSN to LOC.  */
4600
static void
4601
set_insn_locators (rtx insn, int loc)
4602
{
4603
  while (insn != NULL_RTX)
4604
    {
4605
      if (INSN_P (insn))
4606
        INSN_LOCATOR (insn) = loc;
4607
      insn = NEXT_INSN (insn);
4608
    }
4609
}
4610
 
4611
/* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
4612
   be running after reorg, SEQUENCE rtl is possible.  */
4613
 
4614
static int
4615
contains (rtx insn, VEC(int,heap) **vec)
4616
{
4617
  int i, j;
4618
 
4619
  if (NONJUMP_INSN_P (insn)
4620
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
4621
    {
4622
      int count = 0;
4623
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4624
        for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4625
          if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4626
              == VEC_index (int, *vec, j))
4627
            count++;
4628
      return count;
4629
    }
4630
  else
4631
    {
4632
      for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4633
        if (INSN_UID (insn) == VEC_index (int, *vec, j))
4634
          return 1;
4635
    }
4636
  return 0;
4637
}
4638
 
4639
int
4640
prologue_epilogue_contains (rtx insn)
4641
{
4642
  if (contains (insn, &prologue))
4643
    return 1;
4644
  if (contains (insn, &epilogue))
4645
    return 1;
4646
  return 0;
4647
}
4648
 
4649
int
4650
sibcall_epilogue_contains (rtx insn)
4651
{
4652
  if (sibcall_epilogue)
4653
    return contains (insn, &sibcall_epilogue);
4654
  return 0;
4655
}
4656
 
4657
#ifdef HAVE_return
4658
/* Insert gen_return at the end of block BB.  This also means updating
4659
   block_for_insn appropriately.  */
4660
 
4661
static void
4662
emit_return_into_block (basic_block bb, rtx line_note)
4663
{
4664
  emit_jump_insn_after (gen_return (), BB_END (bb));
4665
  if (line_note)
4666
    emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
4667
}
4668
#endif /* HAVE_return */
4669
 
4670
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4671
 
4672
/* These functions convert the epilogue into a variant that does not
4673
   modify the stack pointer.  This is used in cases where a function
4674
   returns an object whose size is not known until it is computed.
4675
   The called function leaves the object on the stack, leaves the
4676
   stack depressed, and returns a pointer to the object.
4677
 
4678
   What we need to do is track all modifications and references to the
4679
   stack pointer, deleting the modifications and changing the
4680
   references to point to the location the stack pointer would have
4681
   pointed to had the modifications taken place.
4682
 
4683
   These functions need to be portable so we need to make as few
4684
   assumptions about the epilogue as we can.  However, the epilogue
4685
   basically contains three things: instructions to reset the stack
4686
   pointer, instructions to reload registers, possibly including the
4687
   frame pointer, and an instruction to return to the caller.
4688
 
4689
   We must be sure of what a relevant epilogue insn is doing.  We also
4690
   make no attempt to validate the insns we make since if they are
4691
   invalid, we probably can't do anything valid.  The intent is that
4692
   these routines get "smarter" as more and more machines start to use
4693
   them and they try operating on different epilogues.
4694
 
4695
   We use the following structure to track what the part of the
4696
   epilogue that we've already processed has done.  We keep two copies
4697
   of the SP equivalence, one for use during the insn we are
4698
   processing and one for use in the next insn.  The difference is
4699
   because one part of a PARALLEL may adjust SP and the other may use
4700
   it.  */
4701
 
4702
struct epi_info
4703
{
4704
  rtx sp_equiv_reg;             /* REG that SP is set from, perhaps SP.  */
4705
  HOST_WIDE_INT sp_offset;      /* Offset from SP_EQUIV_REG of present SP.  */
4706
  rtx new_sp_equiv_reg;         /* REG to be used at end of insn.  */
4707
  HOST_WIDE_INT new_sp_offset;  /* Offset to be used at end of insn.  */
4708
  rtx equiv_reg_src;            /* If nonzero, the value that SP_EQUIV_REG
4709
                                   should be set to once we no longer need
4710
                                   its value.  */
4711
  rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4712
                                             for registers.  */
4713
};
4714
 
4715
static void handle_epilogue_set (rtx, struct epi_info *);
4716
static void update_epilogue_consts (rtx, rtx, void *);
4717
static void emit_equiv_load (struct epi_info *);
4718
 
4719
/* Modify INSN, a list of one or more insns that is part of the epilogue, to
4720
   no modifications to the stack pointer.  Return the new list of insns.  */
4721
 
4722
static rtx
4723
keep_stack_depressed (rtx insns)
4724
{
4725
  int j;
4726
  struct epi_info info;
4727
  rtx insn, next;
4728
 
4729
  /* If the epilogue is just a single instruction, it must be OK as is.  */
4730
  if (NEXT_INSN (insns) == NULL_RTX)
4731
    return insns;
4732
 
4733
  /* Otherwise, start a sequence, initialize the information we have, and
4734
     process all the insns we were given.  */
4735
  start_sequence ();
4736
 
4737
  info.sp_equiv_reg = stack_pointer_rtx;
4738
  info.sp_offset = 0;
4739
  info.equiv_reg_src = 0;
4740
 
4741
  for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4742
    info.const_equiv[j] = 0;
4743
 
4744
  insn = insns;
4745
  next = NULL_RTX;
4746
  while (insn != NULL_RTX)
4747
    {
4748
      next = NEXT_INSN (insn);
4749
 
4750
      if (!INSN_P (insn))
4751
        {
4752
          add_insn (insn);
4753
          insn = next;
4754
          continue;
4755
        }
4756
 
4757
      /* If this insn references the register that SP is equivalent to and
4758
         we have a pending load to that register, we must force out the load
4759
         first and then indicate we no longer know what SP's equivalent is.  */
4760
      if (info.equiv_reg_src != 0
4761
          && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4762
        {
4763
          emit_equiv_load (&info);
4764
          info.sp_equiv_reg = 0;
4765
        }
4766
 
4767
      info.new_sp_equiv_reg = info.sp_equiv_reg;
4768
      info.new_sp_offset = info.sp_offset;
4769
 
4770
      /* If this is a (RETURN) and the return address is on the stack,
4771
         update the address and change to an indirect jump.  */
4772
      if (GET_CODE (PATTERN (insn)) == RETURN
4773
          || (GET_CODE (PATTERN (insn)) == PARALLEL
4774
              && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4775
        {
4776
          rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4777
          rtx base = 0;
4778
          HOST_WIDE_INT offset = 0;
4779
          rtx jump_insn, jump_set;
4780
 
4781
          /* If the return address is in a register, we can emit the insn
4782
             unchanged.  Otherwise, it must be a MEM and we see what the
4783
             base register and offset are.  In any case, we have to emit any
4784
             pending load to the equivalent reg of SP, if any.  */
4785
          if (REG_P (retaddr))
4786
            {
4787
              emit_equiv_load (&info);
4788
              add_insn (insn);
4789
              insn = next;
4790
              continue;
4791
            }
4792
          else
4793
            {
4794
              rtx ret_ptr;
4795
              gcc_assert (MEM_P (retaddr));
4796
 
4797
              ret_ptr = XEXP (retaddr, 0);
4798
 
4799
              if (REG_P (ret_ptr))
4800
                {
4801
                  base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4802
                  offset = 0;
4803
                }
4804
              else
4805
                {
4806
                  gcc_assert (GET_CODE (ret_ptr) == PLUS
4807
                              && REG_P (XEXP (ret_ptr, 0))
4808
                              && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4809
                  base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4810
                  offset = INTVAL (XEXP (ret_ptr, 1));
4811
                }
4812
            }
4813
 
4814
          /* If the base of the location containing the return pointer
4815
             is SP, we must update it with the replacement address.  Otherwise,
4816
             just build the necessary MEM.  */
4817
          retaddr = plus_constant (base, offset);
4818
          if (base == stack_pointer_rtx)
4819
            retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4820
                                            plus_constant (info.sp_equiv_reg,
4821
                                                           info.sp_offset));
4822
 
4823
          retaddr = gen_rtx_MEM (Pmode, retaddr);
4824
          MEM_NOTRAP_P (retaddr) = 1;
4825
 
4826
          /* If there is a pending load to the equivalent register for SP
4827
             and we reference that register, we must load our address into
4828
             a scratch register and then do that load.  */
4829
          if (info.equiv_reg_src
4830
              && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4831
            {
4832
              unsigned int regno;
4833
              rtx reg;
4834
 
4835
              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4836
                if (HARD_REGNO_MODE_OK (regno, Pmode)
4837
                    && !fixed_regs[regno]
4838
                    && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4839
                    && !REGNO_REG_SET_P
4840
                         (EXIT_BLOCK_PTR->il.rtl->global_live_at_start, regno)
4841
                    && !refers_to_regno_p (regno,
4842
                                           regno + hard_regno_nregs[regno]
4843
                                                                   [Pmode],
4844
                                           info.equiv_reg_src, NULL)
4845
                    && info.const_equiv[regno] == 0)
4846
                  break;
4847
 
4848
              gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4849
 
4850
              reg = gen_rtx_REG (Pmode, regno);
4851
              emit_move_insn (reg, retaddr);
4852
              retaddr = reg;
4853
            }
4854
 
4855
          emit_equiv_load (&info);
4856
          jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4857
 
4858
          /* Show the SET in the above insn is a RETURN.  */
4859
          jump_set = single_set (jump_insn);
4860
          gcc_assert (jump_set);
4861
          SET_IS_RETURN_P (jump_set) = 1;
4862
        }
4863
 
4864
      /* If SP is not mentioned in the pattern and its equivalent register, if
4865
         any, is not modified, just emit it.  Otherwise, if neither is set,
4866
         replace the reference to SP and emit the insn.  If none of those are
4867
         true, handle each SET individually.  */
4868
      else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4869
               && (info.sp_equiv_reg == stack_pointer_rtx
4870
                   || !reg_set_p (info.sp_equiv_reg, insn)))
4871
        add_insn (insn);
4872
      else if (! reg_set_p (stack_pointer_rtx, insn)
4873
               && (info.sp_equiv_reg == stack_pointer_rtx
4874
                   || !reg_set_p (info.sp_equiv_reg, insn)))
4875
        {
4876
          int changed;
4877
 
4878
          changed = validate_replace_rtx (stack_pointer_rtx,
4879
                                          plus_constant (info.sp_equiv_reg,
4880
                                                         info.sp_offset),
4881
                                          insn);
4882
          gcc_assert (changed);
4883
 
4884
          add_insn (insn);
4885
        }
4886
      else if (GET_CODE (PATTERN (insn)) == SET)
4887
        handle_epilogue_set (PATTERN (insn), &info);
4888
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4889
        {
4890
          for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4891
            if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4892
              handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4893
        }
4894
      else
4895
        add_insn (insn);
4896
 
4897
      info.sp_equiv_reg = info.new_sp_equiv_reg;
4898
      info.sp_offset = info.new_sp_offset;
4899
 
4900
      /* Now update any constants this insn sets.  */
4901
      note_stores (PATTERN (insn), update_epilogue_consts, &info);
4902
      insn = next;
4903
    }
4904
 
4905
  insns = get_insns ();
4906
  end_sequence ();
4907
  return insns;
4908
}
4909
 
4910
/* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
4911
   structure that contains information about what we've seen so far.  We
4912
   process this SET by either updating that data or by emitting one or
4913
   more insns.  */
4914
 
4915
static void
4916
handle_epilogue_set (rtx set, struct epi_info *p)
4917
{
4918
  /* First handle the case where we are setting SP.  Record what it is being
4919
     set from, which we must be able to determine  */
4920
  if (reg_set_p (stack_pointer_rtx, set))
4921
    {
4922
      gcc_assert (SET_DEST (set) == stack_pointer_rtx);
4923
 
4924
      if (GET_CODE (SET_SRC (set)) == PLUS)
4925
        {
4926
          p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4927
          if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4928
            p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4929
          else
4930
            {
4931
              gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4932
                          && (REGNO (XEXP (SET_SRC (set), 1))
4933
                              < FIRST_PSEUDO_REGISTER)
4934
                          && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4935
              p->new_sp_offset
4936
                = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4937
            }
4938
        }
4939
      else
4940
        p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4941
 
4942
      /* If we are adjusting SP, we adjust from the old data.  */
4943
      if (p->new_sp_equiv_reg == stack_pointer_rtx)
4944
        {
4945
          p->new_sp_equiv_reg = p->sp_equiv_reg;
4946
          p->new_sp_offset += p->sp_offset;
4947
        }
4948
 
4949
      gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
4950
 
4951
      return;
4952
    }
4953
 
4954
  /* Next handle the case where we are setting SP's equivalent
4955
     register.  We must not already have a value to set it to.  We
4956
     could update, but there seems little point in handling that case.
4957
     Note that we have to allow for the case where we are setting the
4958
     register set in the previous part of a PARALLEL inside a single
4959
     insn.  But use the old offset for any updates within this insn.
4960
     We must allow for the case where the register is being set in a
4961
     different (usually wider) mode than Pmode).  */
4962
  else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
4963
    {
4964
      gcc_assert (!p->equiv_reg_src
4965
                  && REG_P (p->new_sp_equiv_reg)
4966
                  && REG_P (SET_DEST (set))
4967
                  && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
4968
                      <= BITS_PER_WORD)
4969
                  && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
4970
      p->equiv_reg_src
4971
        = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4972
                                plus_constant (p->sp_equiv_reg,
4973
                                               p->sp_offset));
4974
    }
4975
 
4976
  /* Otherwise, replace any references to SP in the insn to its new value
4977
     and emit the insn.  */
4978
  else
4979
    {
4980
      SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4981
                                            plus_constant (p->sp_equiv_reg,
4982
                                                           p->sp_offset));
4983
      SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
4984
                                             plus_constant (p->sp_equiv_reg,
4985
                                                            p->sp_offset));
4986
      emit_insn (set);
4987
    }
4988
}
4989
 
4990
/* Update the tracking information for registers set to constants.  */
4991
 
4992
static void
4993
update_epilogue_consts (rtx dest, rtx x, void *data)
4994
{
4995
  struct epi_info *p = (struct epi_info *) data;
4996
  rtx new;
4997
 
4998
  if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4999
    return;
5000
 
5001
  /* If we are either clobbering a register or doing a partial set,
5002
     show we don't know the value.  */
5003
  else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
5004
    p->const_equiv[REGNO (dest)] = 0;
5005
 
5006
  /* If we are setting it to a constant, record that constant.  */
5007
  else if (GET_CODE (SET_SRC (x)) == CONST_INT)
5008
    p->const_equiv[REGNO (dest)] = SET_SRC (x);
5009
 
5010
  /* If this is a binary operation between a register we have been tracking
5011
     and a constant, see if we can compute a new constant value.  */
5012
  else if (ARITHMETIC_P (SET_SRC (x))
5013
           && REG_P (XEXP (SET_SRC (x), 0))
5014
           && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
5015
           && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5016
           && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5017
           && 0 != (new = simplify_binary_operation
5018
                    (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5019
                     p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5020
                     XEXP (SET_SRC (x), 1)))
5021
           && GET_CODE (new) == CONST_INT)
5022
    p->const_equiv[REGNO (dest)] = new;
5023
 
5024
  /* Otherwise, we can't do anything with this value.  */
5025
  else
5026
    p->const_equiv[REGNO (dest)] = 0;
5027
}
5028
 
5029
/* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */
5030
 
5031
static void
5032
emit_equiv_load (struct epi_info *p)
5033
{
5034
  if (p->equiv_reg_src != 0)
5035
    {
5036
      rtx dest = p->sp_equiv_reg;
5037
 
5038
      if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5039
        dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5040
                            REGNO (p->sp_equiv_reg));
5041
 
5042
      emit_move_insn (dest, p->equiv_reg_src);
5043
      p->equiv_reg_src = 0;
5044
    }
5045
}
5046
#endif
5047
 
5048
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
5049
   this into place with notes indicating where the prologue ends and where
5050
   the epilogue begins.  Update the basic block information when possible.  */
5051
 
5052
void
5053
thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
5054
{
5055
  int inserted = 0;
5056
  edge e;
5057
#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
5058
  rtx seq;
5059
#endif
5060
#ifdef HAVE_prologue
5061
  rtx prologue_end = NULL_RTX;
5062
#endif
5063
#if defined (HAVE_epilogue) || defined(HAVE_return)
5064
  rtx epilogue_end = NULL_RTX;
5065
#endif
5066
  edge_iterator ei;
5067
 
5068
#ifdef HAVE_prologue
5069
  if (HAVE_prologue)
5070
    {
5071
      start_sequence ();
5072
      seq = gen_prologue ();
5073
      emit_insn (seq);
5074
 
5075
      /* Retain a map of the prologue insns.  */
5076
      record_insns (seq, &prologue);
5077
      prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
5078
 
5079
#ifndef PROFILE_BEFORE_PROLOGUE
5080
      /* Ensure that instructions are not moved into the prologue when
5081
         profiling is on.  The call to the profiling routine can be
5082
         emitted within the live range of a call-clobbered register.  */
5083
      if (current_function_profile)
5084
        emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
5085
#endif
5086
 
5087
      seq = get_insns ();
5088
      end_sequence ();
5089
      set_insn_locators (seq, prologue_locator);
5090
 
5091
      /* Can't deal with multiple successors of the entry block
5092
         at the moment.  Function should always have at least one
5093
         entry point.  */
5094
      gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5095
 
5096
      insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5097
      inserted = 1;
5098
    }
5099
#endif
5100
 
5101
  /* If the exit block has no non-fake predecessors, we don't need
5102
     an epilogue.  */
5103
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5104
    if ((e->flags & EDGE_FAKE) == 0)
5105
      break;
5106
  if (e == NULL)
5107
    goto epilogue_done;
5108
 
5109
#ifdef HAVE_return
5110
  if (optimize && HAVE_return)
5111
    {
5112
      /* If we're allowed to generate a simple return instruction,
5113
         then by definition we don't need a full epilogue.  Examine
5114
         the block that falls through to EXIT.   If it does not
5115
         contain any code, examine its predecessors and try to
5116
         emit (conditional) return instructions.  */
5117
 
5118
      basic_block last;
5119
      rtx label;
5120
 
5121
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5122
        if (e->flags & EDGE_FALLTHRU)
5123
          break;
5124
      if (e == NULL)
5125
        goto epilogue_done;
5126
      last = e->src;
5127
 
5128
      /* Verify that there are no active instructions in the last block.  */
5129
      label = BB_END (last);
5130
      while (label && !LABEL_P (label))
5131
        {
5132
          if (active_insn_p (label))
5133
            break;
5134
          label = PREV_INSN (label);
5135
        }
5136
 
5137
      if (BB_HEAD (last) == label && LABEL_P (label))
5138
        {
5139
          edge_iterator ei2;
5140
          rtx epilogue_line_note = NULL_RTX;
5141
 
5142
          /* Locate the line number associated with the closing brace,
5143
             if we can find one.  */
5144
          for (seq = get_last_insn ();
5145
               seq && ! active_insn_p (seq);
5146
               seq = PREV_INSN (seq))
5147
            if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
5148
              {
5149
                epilogue_line_note = seq;
5150
                break;
5151
              }
5152
 
5153
          for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5154
            {
5155
              basic_block bb = e->src;
5156
              rtx jump;
5157
 
5158
              if (bb == ENTRY_BLOCK_PTR)
5159
                {
5160
                  ei_next (&ei2);
5161
                  continue;
5162
                }
5163
 
5164
              jump = BB_END (bb);
5165
              if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5166
                {
5167
                  ei_next (&ei2);
5168
                  continue;
5169
                }
5170
 
5171
              /* If we have an unconditional jump, we can replace that
5172
                 with a simple return instruction.  */
5173
              if (simplejump_p (jump))
5174
                {
5175
                  emit_return_into_block (bb, epilogue_line_note);
5176
                  delete_insn (jump);
5177
                }
5178
 
5179
              /* If we have a conditional jump, we can try to replace
5180
                 that with a conditional return instruction.  */
5181
              else if (condjump_p (jump))
5182
                {
5183
                  if (! redirect_jump (jump, 0, 0))
5184
                    {
5185
                      ei_next (&ei2);
5186
                      continue;
5187
                    }
5188
 
5189
                  /* If this block has only one successor, it both jumps
5190
                     and falls through to the fallthru block, so we can't
5191
                     delete the edge.  */
5192
                  if (single_succ_p (bb))
5193
                    {
5194
                      ei_next (&ei2);
5195
                      continue;
5196
                    }
5197
                }
5198
              else
5199
                {
5200
                  ei_next (&ei2);
5201
                  continue;
5202
                }
5203
 
5204
              /* Fix up the CFG for the successful change we just made.  */
5205
              redirect_edge_succ (e, EXIT_BLOCK_PTR);
5206
            }
5207
 
5208
          /* Emit a return insn for the exit fallthru block.  Whether
5209
             this is still reachable will be determined later.  */
5210
 
5211
          emit_barrier_after (BB_END (last));
5212
          emit_return_into_block (last, epilogue_line_note);
5213
          epilogue_end = BB_END (last);
5214
          single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5215
          goto epilogue_done;
5216
        }
5217
    }
5218
#endif
5219
  /* Find the edge that falls through to EXIT.  Other edges may exist
5220
     due to RETURN instructions, but those don't need epilogues.
5221
     There really shouldn't be a mixture -- either all should have
5222
     been converted or none, however...  */
5223
 
5224
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5225
    if (e->flags & EDGE_FALLTHRU)
5226
      break;
5227
  if (e == NULL)
5228
    goto epilogue_done;
5229
 
5230
#ifdef HAVE_epilogue
5231
  if (HAVE_epilogue)
5232
    {
5233
      start_sequence ();
5234
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5235
 
5236
      seq = gen_epilogue ();
5237
 
5238
#ifdef INCOMING_RETURN_ADDR_RTX
5239
      /* If this function returns with the stack depressed and we can support
5240
         it, massage the epilogue to actually do that.  */
5241
      if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5242
          && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5243
        seq = keep_stack_depressed (seq);
5244
#endif
5245
 
5246
      emit_jump_insn (seq);
5247
 
5248
      /* Retain a map of the epilogue insns.  */
5249
      record_insns (seq, &epilogue);
5250
      set_insn_locators (seq, epilogue_locator);
5251
 
5252
      seq = get_insns ();
5253
      end_sequence ();
5254
 
5255
      insert_insn_on_edge (seq, e);
5256
      inserted = 1;
5257
    }
5258
  else
5259
#endif
5260
    {
5261
      basic_block cur_bb;
5262
 
5263
      if (! next_active_insn (BB_END (e->src)))
5264
        goto epilogue_done;
5265
      /* We have a fall-through edge to the exit block, the source is not
5266
         at the end of the function, and there will be an assembler epilogue
5267
         at the end of the function.
5268
         We can't use force_nonfallthru here, because that would try to
5269
         use return.  Inserting a jump 'by hand' is extremely messy, so
5270
         we take advantage of cfg_layout_finalize using
5271
        fixup_fallthru_exit_predecessor.  */
5272
      cfg_layout_initialize (0);
5273
      FOR_EACH_BB (cur_bb)
5274
        if (cur_bb->index >= NUM_FIXED_BLOCKS
5275
            && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5276
          cur_bb->aux = cur_bb->next_bb;
5277
      cfg_layout_finalize ();
5278
    }
5279
epilogue_done:
5280
 
5281
  if (inserted)
5282
    commit_edge_insertions ();
5283
 
5284
#ifdef HAVE_sibcall_epilogue
5285
  /* Emit sibling epilogues before any sibling call sites.  */
5286
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5287
    {
5288
      basic_block bb = e->src;
5289
      rtx insn = BB_END (bb);
5290
 
5291
      if (!CALL_P (insn)
5292
          || ! SIBLING_CALL_P (insn))
5293
        {
5294
          ei_next (&ei);
5295
          continue;
5296
        }
5297
 
5298
      start_sequence ();
5299
      emit_insn (gen_sibcall_epilogue ());
5300
      seq = get_insns ();
5301
      end_sequence ();
5302
 
5303
      /* Retain a map of the epilogue insns.  Used in life analysis to
5304
         avoid getting rid of sibcall epilogue insns.  Do this before we
5305
         actually emit the sequence.  */
5306
      record_insns (seq, &sibcall_epilogue);
5307
      set_insn_locators (seq, epilogue_locator);
5308
 
5309
      emit_insn_before (seq, insn);
5310
      ei_next (&ei);
5311
    }
5312
#endif
5313
 
5314
#ifdef HAVE_prologue
5315
  /* This is probably all useless now that we use locators.  */
5316
  if (prologue_end)
5317
    {
5318
      rtx insn, prev;
5319
 
5320
      /* GDB handles `break f' by setting a breakpoint on the first
5321
         line note after the prologue.  Which means (1) that if
5322
         there are line number notes before where we inserted the
5323
         prologue we should move them, and (2) we should generate a
5324
         note before the end of the first basic block, if there isn't
5325
         one already there.
5326
 
5327
         ??? This behavior is completely broken when dealing with
5328
         multiple entry functions.  We simply place the note always
5329
         into first basic block and let alternate entry points
5330
         to be missed.
5331
       */
5332
 
5333
      for (insn = prologue_end; insn; insn = prev)
5334
        {
5335
          prev = PREV_INSN (insn);
5336
          if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5337
            {
5338
              /* Note that we cannot reorder the first insn in the
5339
                 chain, since rest_of_compilation relies on that
5340
                 remaining constant.  */
5341
              if (prev == NULL)
5342
                break;
5343
              reorder_insns (insn, insn, prologue_end);
5344
            }
5345
        }
5346
 
5347
      /* Find the last line number note in the first block.  */
5348
      for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
5349
           insn != prologue_end && insn;
5350
           insn = PREV_INSN (insn))
5351
        if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5352
          break;
5353
 
5354
      /* If we didn't find one, make a copy of the first line number
5355
         we run across.  */
5356
      if (! insn)
5357
        {
5358
          for (insn = next_active_insn (prologue_end);
5359
               insn;
5360
               insn = PREV_INSN (insn))
5361
            if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5362
              {
5363
                emit_note_copy_after (insn, prologue_end);
5364
                break;
5365
              }
5366
        }
5367
    }
5368
#endif
5369
#ifdef HAVE_epilogue
5370
  if (epilogue_end)
5371
    {
5372
      rtx insn, next;
5373
 
5374
      /* Similarly, move any line notes that appear after the epilogue.
5375
         There is no need, however, to be quite so anal about the existence
5376
         of such a note.  Also move the NOTE_INSN_FUNCTION_END and (possibly)
5377
         NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5378
         info generation.  */
5379
      for (insn = epilogue_end; insn; insn = next)
5380
        {
5381
          next = NEXT_INSN (insn);
5382
          if (NOTE_P (insn)
5383
              && (NOTE_LINE_NUMBER (insn) > 0
5384
                  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5385
                  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
5386
            reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5387
        }
5388
    }
5389
#endif
5390
}
5391
 
5392
/* Reposition the prologue-end and epilogue-begin notes after instruction
5393
   scheduling and delayed branch scheduling.  */
5394
 
5395
void
5396
reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
5397
{
5398
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
5399
  rtx insn, last, note;
5400
  int len;
5401
 
5402
  if ((len = VEC_length (int, prologue)) > 0)
5403
    {
5404
      last = 0, note = 0;
5405
 
5406
      /* Scan from the beginning until we reach the last prologue insn.
5407
         We apparently can't depend on basic_block_{head,end} after
5408
         reorg has run.  */
5409
      for (insn = f; insn; insn = NEXT_INSN (insn))
5410
        {
5411
          if (NOTE_P (insn))
5412
            {
5413
              if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5414
                note = insn;
5415
            }
5416
          else if (contains (insn, &prologue))
5417
            {
5418
              last = insn;
5419
              if (--len == 0)
5420
                break;
5421
            }
5422
        }
5423
 
5424
      if (last)
5425
        {
5426
          /* Find the prologue-end note if we haven't already, and
5427
             move it to just after the last prologue insn.  */
5428
          if (note == 0)
5429
            {
5430
              for (note = last; (note = NEXT_INSN (note));)
5431
                if (NOTE_P (note)
5432
                    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5433
                  break;
5434
            }
5435
 
5436
          /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
5437
          if (LABEL_P (last))
5438
            last = NEXT_INSN (last);
5439
          reorder_insns (note, note, last);
5440
        }
5441
    }
5442
 
5443
  if ((len = VEC_length (int, epilogue)) > 0)
5444
    {
5445
      last = 0, note = 0;
5446
 
5447
      /* Scan from the end until we reach the first epilogue insn.
5448
         We apparently can't depend on basic_block_{head,end} after
5449
         reorg has run.  */
5450
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5451
        {
5452
          if (NOTE_P (insn))
5453
            {
5454
              if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5455
                note = insn;
5456
            }
5457
          else if (contains (insn, &epilogue))
5458
            {
5459
              last = insn;
5460
              if (--len == 0)
5461
                break;
5462
            }
5463
        }
5464
 
5465
      if (last)
5466
        {
5467
          /* Find the epilogue-begin note if we haven't already, and
5468
             move it to just before the first epilogue insn.  */
5469
          if (note == 0)
5470
            {
5471
              for (note = insn; (note = PREV_INSN (note));)
5472
                if (NOTE_P (note)
5473
                    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5474
                  break;
5475
            }
5476
 
5477
          if (PREV_INSN (last) != note)
5478
            reorder_insns (note, note, PREV_INSN (last));
5479
        }
5480
    }
5481
#endif /* HAVE_prologue or HAVE_epilogue */
5482
}
5483
 
5484
/* Resets insn_block_boundaries array.  */
5485
 
5486
void
5487
reset_block_changes (void)
5488
{
5489
  cfun->ib_boundaries_block = VEC_alloc (tree, gc, 100);
5490
  VEC_quick_push (tree, cfun->ib_boundaries_block, NULL_TREE);
5491
}
5492
 
5493
/* Record the boundary for BLOCK.  */
5494
void
5495
record_block_change (tree block)
5496
{
5497
  int i, n;
5498
  tree last_block;
5499
 
5500
  if (!block)
5501
    return;
5502
 
5503
  if(!cfun->ib_boundaries_block)
5504
    return;
5505
 
5506
  last_block = VEC_pop (tree, cfun->ib_boundaries_block);
5507
  n = get_max_uid ();
5508
  for (i = VEC_length (tree, cfun->ib_boundaries_block); i < n; i++)
5509
    VEC_safe_push (tree, gc, cfun->ib_boundaries_block, last_block);
5510
 
5511
  VEC_safe_push (tree, gc, cfun->ib_boundaries_block, block);
5512
}
5513
 
5514
/* Finishes record of boundaries.  */
5515
void
5516
finalize_block_changes (void)
5517
{
5518
  record_block_change (DECL_INITIAL (current_function_decl));
5519
}
5520
 
5521
/* For INSN return the BLOCK it belongs to.  */
5522
void
5523
check_block_change (rtx insn, tree *block)
5524
{
5525
  unsigned uid = INSN_UID (insn);
5526
 
5527
  if (uid >= VEC_length (tree, cfun->ib_boundaries_block))
5528
    return;
5529
 
5530
  *block = VEC_index (tree, cfun->ib_boundaries_block, uid);
5531
}
5532
 
5533
/* Releases the ib_boundaries_block records.  */
5534
void
5535
free_block_changes (void)
5536
{
5537
  VEC_free (tree, gc, cfun->ib_boundaries_block);
5538
}
5539
 
5540
/* Returns the name of the current function.  */
5541
const char *
5542
current_function_name (void)
5543
{
5544
  return lang_hooks.decl_printable_name (cfun->decl, 2);
5545
}
5546
 
5547
 
5548
static unsigned int
5549
rest_of_handle_check_leaf_regs (void)
5550
{
5551
#ifdef LEAF_REGISTERS
5552
  current_function_uses_only_leaf_regs
5553
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5554
#endif
5555
  return 0;
5556
}
5557
 
5558
/* Insert a TYPE into the used types hash table of CFUN.  */
5559
static void
5560
used_types_insert_helper (tree type, struct function *func)
5561
{
5562
  if (type != NULL && func != NULL)
5563
    {
5564
      void **slot;
5565
 
5566
      if (func->used_types_hash == NULL)
5567
        func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5568
                                                 htab_eq_pointer, NULL);
5569
      slot = htab_find_slot (func->used_types_hash, type, INSERT);
5570
      if (*slot == NULL)
5571
        *slot = type;
5572
    }
5573
}
5574
 
5575
/* Given a type, insert it into the used hash table in cfun.  */
5576
void
5577
used_types_insert (tree t)
5578
{
5579
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5580
    t = TREE_TYPE (t);
5581
  t = TYPE_MAIN_VARIANT (t);
5582
  if (debug_info_level > DINFO_LEVEL_NONE)
5583
    used_types_insert_helper (t, cfun);
5584
}
5585
 
5586
struct tree_opt_pass pass_leaf_regs =
5587
{
5588
  NULL,                                 /* name */
5589
  NULL,                                 /* gate */
5590
  rest_of_handle_check_leaf_regs,       /* execute */
5591
  NULL,                                 /* sub */
5592
  NULL,                                 /* next */
5593
  0,                                    /* static_pass_number */
5594
  0,                                    /* tv_id */
5595
  0,                                    /* properties_required */
5596
  0,                                    /* properties_provided */
5597
  0,                                    /* properties_destroyed */
5598
  0,                                    /* todo_flags_start */
5599
  0,                                    /* todo_flags_finish */
5600
 
5601
};
5602
 
5603
 
5604
#include "gt-function.h"

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.