/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "function.h"
#include "regs.h"
#include "toplev.h"
#include "output.h"
#include "tm_p.h"
#include "timevar.h"
#include "sbitmap.h"
#include "langhooks.h"
#include "target.h"
#include "cgraph.h"
#include "except.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
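/* For example, on a target where PREFERRED_STACK_BOUNDARY is 64 and
   BITS_PER_UNIT is 8, STACK_BYTES is 8, so argument blocks are kept a
   multiple of 8 bytes long.  */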
 
/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;
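/* For instance, once a 4-byte argument has been stored at offset 8 of
   the outgoing argument block, stack_usage_map[8] through
   stack_usage_map[11] are nonzero and highest_outgoing_arg_in_use is
   at least 12.  */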
 
/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been stored
   into the stack.  This bitmap is used to prevent sibling call optimization
   if the function tries to use its parent's incoming argument slots when
   they have already been overwritten with tail call arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                         HOST_WIDE_INT, rtx, rtx, int, rtx, int,
                         CUMULATIVE_ARGS *);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, int);
static void initialize_argument_information (int, struct arg_data *,
                                             struct args_size *, int, tree,
                                             tree, CUMULATIVE_ARGS *, int,
                                             rtx *, int *, int *, int *,
                                             bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
                                      enum machine_mode, int, va_list);
static int special_function_p (tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
                                                      unsigned int);
static tree split_complex_values (tree);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
              ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
              : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
        funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      static_chain_value = convert_memory_address (Pmode, static_chain_value);
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (REG_P (static_chain_rtx))
        use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}
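/* Sketch of a typical call site (hypothetical variable names, not a
   quote from expand_call later in this file):

       funexp = prepare_call_address (funexp, static_chain_value,
                                      &call_fusage, reg_parm_seen,
                                      sibcall_attempt_p);

   where SIBCALL_ATTEMPT_P stands for whatever flag the caller uses to
   mark a sibling-call attempt.  */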
 
/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
             HOST_WIDE_INT rounded_stack_size,
             HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx;
  struct_value_size_rtx = GEN_INT (struct_value_size);
#endif

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (* args_so_far);
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

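  /* The conditionally-compiled chain below picks the most specific call
     pattern the target provides, trying in order: sibcall[_value]_pop,
     call[_value]_pop, sibcall[_value], then plain call[_value].  Exactly
     one arm emits the call insn; a target that provides none of these
     patterns falls through to gcc_unreachable.  */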
#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_SIBCALL_VALUE_POP (valreg,
                                     gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     n_pop);
      else
        pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                               rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_CALL_VALUE_POP (valreg,
                                  gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
        pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                            rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
        emit_call_insn (GEN_SIBCALL_VALUE (valreg,
                                           gen_rtx_MEM (FUNCTION_MODE, funexp),
                                           rounded_stack_size_rtx,
                                           next_arg_reg, NULL_RTX));
      else
        emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     struct_value_size_rtx));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
        emit_call_insn (GEN_CALL_VALUE (valreg,
                                        gen_rtx_MEM (FUNCTION_MODE, funexp),
                                        rounded_stack_size_rtx, next_arg_reg,
                                        NULL_RTX));
      else
        emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg,
                                  struct_value_size_rtx));
    }
  else
#endif
    gcc_unreachable ();

  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Mark memory as used for "pure" function call.  */
  if (ecf_flags & ECF_PURE)
    call_fusage
      = gen_rtx_EXPR_LIST
        (VOIDmode,
         gen_rtx_USE (VOIDmode,
                      gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
         call_fusage);

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & (ECF_CONST | ECF_PURE))
    CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (ecf_flags & ECF_NOTHROW)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
                                               REG_NOTES (call_insn));
  else
    {
      int rn = lookup_stmt_eh_region (fntree);

      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
         throw, which we already took care of.  */
      if (rn > 0)
        REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
                                                   REG_NOTES (call_insn));
      note_current_region_may_contain_throw ();
    }

  if (ecf_flags & ECF_NORETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
                                               REG_NOTES (call_insn));

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
                                                 REG_NOTES (call_insn));
      current_function_calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;
    }

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (rounded_stack_size != 0)
        {
          if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN))
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
              && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similarly to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
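/* Worked example (assuming an i386-style callee-pop convention): for a
   `stdcall' function taking two ints, RETURN_POPS_ARGS yields 8, so the
   call pattern records an 8-byte pop and the code above subtracts 8 from
   rounded_stack_size and stack_pointer_delta instead of emitting a
   separate stack adjustment after the call.  */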
 
/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set NORETURN if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (tree fndecl, int flags)
{
  if (fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
         wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
          || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
            && name[0] == 'a'
            && ! strcmp (name, "alloca"))
           || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
               && name[0] == '_'
               && ! strcmp (name, "__builtin_alloca"))))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
        {
          if (name[1] == '_' && name[2] == 'x')
            tname += 3;
          else if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }

      if (tname[0] == 's')
        {
          if ((tname[1] == 'e'
               && (! strcmp (tname, "setjmp")
                   || ! strcmp (tname, "setjmp_syscall")))
              || (tname[1] == 'i'
                  && ! strcmp (tname, "sigsetjmp"))
              || (tname[1] == 'a'
                  && ! strcmp (tname, "savectx")))
            flags |= ECF_RETURNS_TWICE;

          if (tname[1] == 'i'
              && ! strcmp (tname, "siglongjmp"))
            flags |= ECF_NORETURN;
        }
      else if ((tname[0] == 'q' && tname[1] == 's'
                && ! strcmp (tname, "qsetjmp"))
               || (tname[0] == 'v' && tname[1] == 'f'
                   && ! strcmp (tname, "vfork"))
               || (tname[0] == 'g' && tname[1] == 'e'
                   && !strcmp (tname, "getcontext")))
        flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
               && ! strcmp (tname, "longjmp"))
        flags |= ECF_NORETURN;
    }

  return flags;
}
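/* For example, a file-scope extern declaration of `vfork' comes back with
   ECF_RETURNS_TWICE set, and so does `__xsetjmp': the `__x' prefix is
   stripped above, leaving `setjmp', which matches the returns-twice
   family.  */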
 
/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (tree fndecl)
{
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}

/* Return true when EXP contains an alloca call.  */
bool
alloca_call_p (tree exp)
{
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
          == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                              0) & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (tree exp)
{
  int flags = 0;
  tree type = exp;

  if (DECL_P (exp))
    {
      type = TREE_TYPE (exp);

      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* The function exp may have the `pure' attribute.  */
      if (DECL_IS_PURE (exp))
        flags |= ECF_PURE;

      if (DECL_IS_NOVOPS (exp))
        flags |= ECF_NOVOPS;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
        flags |= ECF_CONST;

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
    flags |= ECF_CONST;

  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

  /* Mark if the function returns with the stack pointer depressed.  We
     cannot consider it pure or constant in that case.  */
  if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
    {
      flags |= ECF_SP_DEPRESSED;
      flags &= ~(ECF_PURE | ECF_CONST);
    }

  return flags;
}
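/* Example of the attribute-to-flag mapping (hypothetical declaration,
   shown only to illustrate):

       extern int hash (const char *) __attribute__ ((pure, nothrow));

   yields ECF_PURE | ECF_NOTHROW, since `pure' sets DECL_IS_PURE and
   `nothrow' sets TREE_NOTHROW on the decl.  */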
 
/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (TREE_OPERAND (t, 0));
      if (t && TREE_CODE (t) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
        flags = 0;
    }

  return flags;
}

/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_normal (args[i].tree_value);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();
          }

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !LEGITIMATE_CONSTANT_P (args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)
          {
            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));
          }

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameter registers.  */

        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
                 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
                     || optimize))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
 
#ifdef REG_PARM_STACK_SPACE

  /* The argument list is the property of the called routine and it
     may clobber it.  If the fixed area has been used for previous
     parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
        int num_to_save;
        enum machine_mode save_mode;
        int delta;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;
        save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        if ((low & (MIN (GET_MODE_SIZE (save_mode),
                         BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
          save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
        delta = -high;
#else
        delta = low;
#endif
        stack_area = gen_rtx_MEM (save_mode,
                                  memory_address (save_mode,
                                                  plus_constant (argblock,
                                                                 delta)));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save, 0);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  enum machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx stack_area;

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;
#else
  delta = low_to_save;
#endif
  stack_area = gen_rtx_MEM (save_mode,
                            memory_address (save_mode,
                                            plus_constant (argblock, delta)));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && args[i].mode == BLKmode
        && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

        if (args[i].partial)
          {
            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
          }
        else
          {
            args[i].n_aligned_regs
              = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
          }

        args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == downward)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
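        /* Worked example: for a 3-byte struct on a 32-bit big-endian
           target, endian_correction is 32 - 24 = 8, so the 24 value bits
           are stored at bit offset 8 within the word, skipping the
           word's unused high-order byte.  */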
 
        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, word_mode,
                             word);
          }
      }
}

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   ACTPARMS.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   FNDECL is the tree code for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */

static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree actparms, tree fndecl,
                                 CUMULATIVE_ARGS *args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level, int *old_pending_adj,
                                 int *must_preallocate, int *ecf_flags,
                                 bool *may_tailcall, bool call_from_thunk_p)
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  int i;
  tree p;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;
      /* In this case, must reverse order of args
         so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
         pass the first field of the union.  We have already verified that
         the modes are the same.  */
      if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
        type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many bytes are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (pass_by_reference (args_so_far, TYPE_MODE (type),
                             type, argpos < n_named_args))
        {
          bool callee_copies;
          tree base;

          callee_copies
            = reference_callee_copied (args_so_far, TYPE_MODE (type),
                                       type, argpos < n_named_args);

          /* If we're compiling a thunk, pass through invisible references
             instead of making a copy.  */
          if (call_from_thunk_p
              || (callee_copies
                  && !TREE_ADDRESSABLE (type)
                  && (base = get_base_address (args[i].tree_value))
                  && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
            {
              /* We can't use sibcalls if a callee-copied argument is
                 stored in the current function's frame.  */
              if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
                *may_tailcall = false;

              args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
              type = TREE_TYPE (args[i].tree_value);

              *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
            }
          else
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
                  || (flag_stack_check && ! STACK_CHECK_BUILTIN
                      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
                                                STACK_CHECK_MAX_VAR_SIZE))))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (TREE_VALUE (p));

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  copy = gen_rtx_MEM (BLKmode,
                                      allocate_dynamic_stack_space
                                      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
                  set_mem_attributes (copy, type, 1);
                }
              else
                copy = assign_temp (type, 0, 1, 0);

              store_expr (args[i].tree_value, copy, 0);

              if (callee_copies)
                *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
              else
                *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

              args[i].tree_value
                = build_fold_addr_expr (make_tree (type, copy));
              type = TREE_TYPE (args[i].tree_value);
              *may_tailcall = false;
            }
        }

      mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);

      if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
        mode = promote_mode (type, mode, &unsignedp, 1);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
                                  argpos < n_named_args);
#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  */
      args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
                                                     argpos < n_named_args);
#else
      args[i].tail_call_reg = args[i].reg;
#endif

      if (args[i].reg)
        args[i].partial
          = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
                                             argpos < n_named_args);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
         we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
        *ecf_flags &= ~ECF_LIBCALL_BLOCK;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
                            argpos < n_named_args);
    }
}

/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        {
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
          args_size->var = round_up (args_size->var, preferred_stack_boundary);
        }

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          args_size->var
            = size_binop (MINUS_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));
#endif
        }
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
                               + stack_pointer_delta
                               + preferred_stack_boundary - 1)
                              / preferred_stack_boundary
                              * preferred_stack_boundary)
                             - stack_pointer_delta);
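      /* Worked example: with args_size->constant == 10,
         stack_pointer_delta == 4 and a 16-byte boundary, the expression
         rounds 10 + 4 = 14 up to 16 and subtracts the delta again,
         giving 12; pushing 12 more bytes leaves the stack pointer
         16-byte aligned.  */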
 
      args_size->constant = MAX (args_size->constant,
                                 reg_parm_stack_space);

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size->constant -= reg_parm_stack_space;
#endif
    }
  return unadjusted_args_size;
}

/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int flags, int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (we have code to avoid
     such a case by saving the outgoing stack arguments, but it results in
     worse code)  */
  if ((flags & ECF_LIBCALL_BLOCK) == 0 && !ACCUMULATE_OUTGOING_ARGS)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      enum machine_mode mode;

      if ((flags & ECF_LIBCALL_BLOCK) == 0
          && TREE_CODE (args[i].tree_value) != CALL_EXPR)
        continue;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));

      args[i].initial_value = args[i].value
        = expand_normal (args[i].tree_value);

      mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
      if (mode != args[i].mode)
        {
          args[i].value
            = convert_modes (args[i].mode, mode,
                             args[i].value, args[i].unsignedp);
#if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
          /* CSE will replace this only if it contains args[i].value
             pseudo, so convert it down to the declared mode using
             a SUBREG.  */
          if (REG_P (args[i].value)
              && GET_MODE_CLASS (args[i].mode) == MODE_INT)
            {
              args[i].initial_value
                = gen_lowpart_SUBREG (mode, args[i].value);
              SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
                                            args[i].unsignedp);
            }
#endif
        }
    }
}

/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
        {
          if (args[i].partial > 0 && ! args[i].pass_on_stack)
            partial_seen = 1;
          else if (partial_seen && args[i].reg == 0)
            must_preallocate = 1;

          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  || TREE_CODE (args[i].tree_value) == COND_EXPR
                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
            copy_to_evaluate_size
              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        }

      if (copy_to_evaluate_size * 2 >= args_size->constant
          && args_size->constant > 0)
        must_preallocate = 1;
    }
  return must_preallocate;
}
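/* Example of the size heuristic above: with a 40-byte argument block of
   which a single BLKmode CALL_EXPR argument accounts for 24 bytes,
   copy_to_evaluate_size * 2 == 48 >= 40, so the block is preallocated
   rather than evaluated through a temporary and pushed.  */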
 
1344
/* If we preallocated stack space, compute the address of each argument
1345
   and store it into the ARGS array.
1346
 
1347
   We need not ensure it is a valid memory address here; it will be
1348
   validized when it is used.
1349
 
1350
   ARGBLOCK is an rtx for the address of the outgoing arguments.  */
1351
 
1352
static void
1353
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1354
{
1355
  if (argblock)
1356
    {
1357
      rtx arg_reg = argblock;
1358
      int i, arg_offset = 0;
1359
 
1360
      if (GET_CODE (argblock) == PLUS)
1361
        arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1362
 
1363
      for (i = 0; i < num_actuals; i++)
1364
        {
1365
          rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1366
          rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1367
          rtx addr;
1368
          unsigned int align, boundary;
1369
          unsigned int units_on_stack = 0;
1370
          enum machine_mode partial_mode = VOIDmode;
1371
 
1372
          /* Skip this parm if it will not be passed on the stack.  */
1373
          if (! args[i].pass_on_stack
1374
              && args[i].reg != 0
1375
              && args[i].partial == 0)
1376
            continue;
1377
 
1378
          if (GET_CODE (offset) == CONST_INT)
1379
            addr = plus_constant (arg_reg, INTVAL (offset));
1380
          else
1381
            addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1382
 
1383
          addr = plus_constant (addr, arg_offset);
1384
 
1385
          if (args[i].partial != 0)
1386
            {
1387
              /* Only part of the parameter is being passed on the stack.
1388
                 Generate a simple memory reference of the correct size.  */
1389
              units_on_stack = args[i].locate.size.constant;
1390
              partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
1391
                                            MODE_INT, 1);
1392
              args[i].stack = gen_rtx_MEM (partial_mode, addr);
1393
              set_mem_size (args[i].stack, GEN_INT (units_on_stack));
1394
            }
1395
          else
1396
            {
1397
              args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1398
              set_mem_attributes (args[i].stack,
1399
                                  TREE_TYPE (args[i].tree_value), 1);
1400
            }
1401
          align = BITS_PER_UNIT;
1402
          boundary = args[i].locate.boundary;
1403
          if (args[i].locate.where_pad != downward)
1404
            align = boundary;
1405
          else if (GET_CODE (offset) == CONST_INT)
1406
            {
1407
              align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1408
              align = align & -align;
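              /* For instance (numbers chosen for illustration), a byte
                 offset of 4 with a 64-bit boundary gives 32 | 64 == 96,
                 and 96 & -96 == 32, i.e. 32-bit alignment: the lowest set
                 bit bounds what the slot's address can guarantee.  */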
            }
          set_mem_align (args[i].stack, align);

          if (GET_CODE (slot_offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (slot_offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

          addr = plus_constant (addr, arg_offset);

          if (args[i].partial != 0)
            {
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));
            }
          else
            {
              args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack_slot,
                                  TREE_TYPE (args[i].tree_value), 1);
            }
          set_mem_align (args[i].stack_slot, args[i].locate.boundary);

          /* Function incoming arguments may overlap with sibling call
             outgoing arguments and we cannot allow reordering of reads
             from function arguments with stores to outgoing arguments
             of sibling calls.  */
          set_mem_alias_set (args[i].stack, 0);
          set_mem_alias_set (args[i].stack_slot, 0);
        }
    }
}

/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
         make an external definition for it.  */
      if (! TREE_USED (fndecl))
        {
          assemble_external (fndecl);
          TREE_USED (fndecl) = 1;
        }

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_normal (addr);
      pop_temp_slots ();        /* FUNEXP can't be BLKmode.  */
    }
  return funexp;
}

/* Return true if and only if SIZE storage units (usually bytes)
   starting from address ADDR overlap with the already-clobbered
   argument area.  This function is used to decide whether we should
   give up on a sibcall.  */

static bool
mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
{
  HOST_WIDE_INT i;

  if (addr == current_function_internal_arg_pointer)
    i = 0;
  else if (GET_CODE (addr) == PLUS
           && XEXP (addr, 0) == current_function_internal_arg_pointer
           && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    i = INTVAL (XEXP (addr, 1));
  /* Return true for arg pointer based indexed addressing.  */
  else if (GET_CODE (addr) == PLUS
           && (XEXP (addr, 0) == current_function_internal_arg_pointer
               || XEXP (addr, 1) == current_function_internal_arg_pointer))
    return true;
  else
    return false;

#ifdef ARGS_GROW_DOWNWARD
  i = -i - size;
#endif
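
  /* At this point I indexes the first byte of the accessed region within
     stored_args_map; e.g. (purely illustrative) a 4-byte load at
     internal_arg_pointer + 8 checks map bits 8..11 when args grow
     upward.  */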
  if (size > 0)
    {
      unsigned HOST_WIDE_INT k;

      for (k = 0; k < size; k++)
        if (i + k < stored_args_map->n_bits
            && TEST_BIT (stored_args_map, i + k))
          return true;
    }

  return false;
}

/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

static void
load_register_parameters (struct arg_data *args, int num_actuals,
                          rtx *call_fusage, int flags, int is_sibcall,
                          int *sibcall_failure)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    {
      rtx reg = ((flags & ECF_SIBCALL)
                 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
        {
          int partial = args[i].partial;
          int nregs;
          int size = 0;
          rtx before_arg = get_last_insn ();
          /* Set non-negative if we must move a word at a time, even if
             just one word (e.g., partial == 4 && mode == DFmode).  Set
             to -1 if we just use a normal move insn.  This value can be
             zero if the argument is a zero size structure.  */
          nregs = -1;
          if (GET_CODE (reg) == PARALLEL)
            ;
          else if (partial)
            {
              gcc_assert (partial % UNITS_PER_WORD == 0);
              nregs = partial / UNITS_PER_WORD;
            }
          else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
            {
              size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
              nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
            }
          else
            size = GET_MODE_SIZE (args[i].mode);

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */

          if (GET_CODE (reg) == PARALLEL)
            emit_group_move (reg, args[i].parallel_value);

          /* If simple case, just do move.  If normal partial, store_one_arg
             has already loaded the register for us.  In all other cases,
             load the register(s) from memory.  */

          else if (nregs == -1)
            {
              emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
              /* Handle the case where we have a value that needs shifting
                 up to the msb, e.g. a QImode value that we're padding
                 upward on a BYTES_BIG_ENDIAN machine.  */
              if (size < UNITS_PER_WORD
                  && (args[i].locate.where_pad
                      == (BYTES_BIG_ENDIAN ? upward : downward)))
                {
                  rtx x;
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

                  /* Assigning REG here rather than a temp makes CALL_FUSAGE
                     report the whole reg as used.  Strictly speaking, the
                     call only uses SIZE bytes at the msb end, but it doesn't
                     seem worth generating rtl to say that.  */
                  reg = gen_rtx_REG (word_mode, REGNO (reg));
                  x = expand_shift (LSHIFT_EXPR, word_mode, reg,
                                    build_int_cst (NULL_TREE, shift),
                                    reg, 1);
                  if (x != reg)
                    emit_move_insn (reg, x);
                }
#endif
            }

          /* If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  */

          else if (args[i].n_aligned_regs != 0)
            for (j = 0; j < args[i].n_aligned_regs; j++)
              emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
                              args[i].aligned_regs[j]);

          else if (partial == 0 || args[i].pass_on_stack)
            {
              rtx mem = validize_mem (args[i].value);

              /* Check for overlap with already clobbered argument area.  */
              if (is_sibcall
                  && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0),
                                                           size))
                *sibcall_failure = 1;

              /* Handle a BLKmode that needs shifting.  */
              if (nregs == 1 && size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                  && args[i].locate.where_pad == downward
#else
                  && BYTES_BIG_ENDIAN
#endif
                 )
                {
                  rtx tem = operand_subword_force (mem, 0, args[i].mode);
                  rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
                  rtx x = gen_reg_rtx (word_mode);
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
                  enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
                                                        : LSHIFT_EXPR;
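                  /* For example (illustrative numbers): size == 2 with
                     UNITS_PER_WORD == 4 gives shift == 16; on a big-endian
                     target the right shift moves the two live bytes from
                     the msb end of the loaded word down to the lsb end of
                     the register.  */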

                  emit_move_insn (x, tem);
                  x = expand_shift (dir, word_mode, x,
                                    build_int_cst (NULL_TREE, shift),
                                    ri, 1);
                  if (x != ri)
                    emit_move_insn (ri, x);
                }
              else
                move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
            }

          /* When a parameter is a block, and perhaps in other cases, it is
             possible that it did a load from an argument slot that was
             already clobbered.  */
          if (is_sibcall
              && check_sibcall_argument_overlap (before_arg, &args[i], 0))
            *sibcall_failure = 1;

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (reg) == PARALLEL)
            use_group_regs (call_fusage, reg);
          else if (nregs == -1)
            use_reg (call_fusage, reg);
          else if (nregs > 0)
            use_regs (call_fusage, REGNO (reg), nregs);
        }
    }
}

/* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we compute an adjust to the stack pointer for an
   amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
   bytes.  Then, when the arguments are pushed the stack will be perfectly
   aligned.  ARGS_SIZE->CONSTANT is set to the number of bytes that should
   be popped after the call.  Returns the adjustment.  */

static int
combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
                                           struct args_size *args_size,
                                           unsigned int preferred_unit_stack_boundary)
{
  /* The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
  HOST_WIDE_INT adjustment;
  /* The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  */
  unsigned HOST_WIDE_INT unadjusted_alignment;

  unadjusted_alignment
    = ((stack_pointer_delta + unadjusted_args_size)
       % preferred_unit_stack_boundary);

  /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */

  /* Begin by trying to pop all the bytes.  */
  unadjusted_alignment
    = (unadjusted_alignment
       - (pending_stack_adjust % preferred_unit_stack_boundary));
  adjustment = pending_stack_adjust;
  /* Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  */
  if (preferred_unit_stack_boundary > 1)
    {
      if (unadjusted_alignment > 0)
        adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
      else
        adjustment += unadjusted_alignment;
    }
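
  /* A worked example with made-up numbers: pending_stack_adjust == 20,
     unadjusted_args_size == 4, stack_pointer_delta == 0 and a 16-byte
     boundary give unadjusted_alignment == 4 - (20 % 16) == 0, so we can
     pop all 20 bytes (adjustment == 20); pushing the 4 argument bytes
     then leaves the stack 16-byte aligned, and args_size->constant below
     becomes 20 - 20 + 4 == 4.  */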

  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  */
  args_size->constant
    = pending_stack_adjust - adjustment + unadjusted_args_size;

  return adjustment;
}

/* Scan expression X for dereferences of argument slots we have already
   clobbered with tail call arguments (as noted in the stored_args_map
   bitmap).
   Return nonzero if X dereferences such an argument slot,
   zero otherwise.  */

static int
check_sibcall_argument_overlap_1 (rtx x)
{
  RTX_CODE code;
  int i, j;
  const char *fmt;

  if (x == NULL_RTX)
    return 0;

  code = GET_CODE (x);

  if (code == MEM)
    return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
                                                 GET_MODE_SIZE (GET_MODE (x)));

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
        {
          if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
            return 1;
        }
      else if (*fmt == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
              return 1;
        }
    }
  return 0;
}

/* Scan the insn sequence after INSN for dereferences of argument slots
   we have already clobbered with tail call arguments (as noted in the
   stored_args_map bitmap).  If MARK_STORED_ARGS_MAP is nonzero, afterwards
   add the stack slots for ARG to the stored_args_map bitmap (when ARG is
   a register MARK_STORED_ARGS_MAP should be 0).  Return nonzero if the
   sequence after INSN dereferences such argument slots, zero otherwise.  */

static int
check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
{
  int low, high;

  if (insn == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (insn);

  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && check_sibcall_argument_overlap_1 (PATTERN (insn)))
      break;

  if (mark_stored_args_map)
    {
#ifdef ARGS_GROW_DOWNWARD
      low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
#else
      low = arg->locate.slot_offset.constant;
#endif

      for (high = low + arg->locate.size.constant; low < high; low++)
        SET_BIT (stored_args_map, low);
    }
  return insn != NULL_RTX;
}

/* Given that a function returns a value of mode MODE at the most
   significant end of hard register VALUE, shift VALUE left or right
   as specified by LEFT_P.  Return true if some action was needed.  */

bool
shift_return_value (enum machine_mode mode, bool left_p, rtx value)
{
  HOST_WIDE_INT shift;

  gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
  shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
  if (shift == 0)
    return false;
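
  /* E.g. (illustrative) an SImode value living in a 64-bit register gives
     shift == 32; shift == 0 means the value already fills the register
     and nothing needs to move.  */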

  /* Use ashr rather than lshr for right shifts.  This is for the benefit
     of the MIPS port, which requires SImode values to be sign-extended
     when stored in 64-bit registers.  */
  if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
                           value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
    gcc_unreachable ();
  return true;
}

/* Generate all the code for a function call
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */

rtx
expand_call (tree exp, rtx target, int ignore)
{
  /* Nonzero if we are currently expanding a call.  */
  static int currently_expanding_call = 0;

  /* List of actual parameters.  */
  tree actparms = TREE_OPERAND (exp, 1);
  /* RTX for the function to be called.  */
  rtx funexp;
  /* Sequence of insns to perform a normal "call".  */
  rtx normal_call_insns = NULL_RTX;
  /* Sequence of insns to perform a tail "call".  */
  rtx tail_call_insns = NULL_RTX;
  /* Data type of the function.  */
  tree funtype;
  tree type_arg_types;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  /* The type of the function being called.  */
  tree fntype;
  bool try_tail_call = CALL_EXPR_TAILCALL (exp);
  int pass;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
1863
  /* Nonzero if that address is being passed by treating it as
1864
     an extra, implicit first parameter.  Otherwise,
1865
     it is passed by being copied directly into struct_value_rtx.  */
1866
  int structure_value_addr_parm = 0;
1867
  /* Size of aggregate value wanted, or zero if none wanted
1868
     or if we are using the non-reentrant PCC calling convention
1869
     or expecting the value in registers.  */
1870
  HOST_WIDE_INT struct_value_size = 0;
1871
  /* Nonzero if called function returns an aggregate in memory PCC style,
1872
     by returning the address of where to find it.  */
1873
  int pcc_struct_value = 0;
1874
  rtx struct_value = 0;
1875
 
1876
  /* Number of actual parameters in this call, including struct value addr.  */
1877
  int num_actuals;
1878
  /* Number of named args.  Args after this are anonymous ones
1879
     and they must all go on the stack.  */
1880
  int n_named_args;
1881
 
1882
  /* Vector of information about each argument.
1883
     Arguments are numbered in the order they will be pushed,
1884
     not the order they are written.  */
1885
  struct arg_data *args;
1886
 
1887
  /* Total size in bytes of all the stack-parms scanned so far.  */
1888
  struct args_size args_size;
1889
  struct args_size adjusted_args_size;
1890
  /* Size of arguments before any adjustments (such as rounding).  */
1891
  int unadjusted_args_size;
1892
  /* Data on reg parms scanned so far.  */
1893
  CUMULATIVE_ARGS args_so_far;
1894
  /* Nonzero if a reg parm has been scanned.  */
1895
  int reg_parm_seen;
1896
  /* Nonzero if this is an indirect function call.  */
1897
 
1898
  /* Nonzero if we must avoid push-insns in the args for this call.
1899
     If stack space is allocated for register parameters, but not by the
1900
     caller, then it is preallocated in the fixed part of the stack frame.
1901
     So the entire argument block must then be preallocated (i.e., we
1902
     ignore PUSH_ROUNDING in that case).  */
1903
 
1904
  int must_preallocate = !PUSH_ARGS;
1905
 
1906
  /* Size of the stack reserved for parameter registers.  */
1907
  int reg_parm_stack_space = 0;
1908
 
1909
  /* Address of space preallocated for stack parms
1910
     (on machines that lack push insns), or 0 if space not preallocated.  */
1911
  rtx argblock = 0;
1912
 
1913
  /* Mask of ECF_ flags.  */
1914
  int flags = 0;
1915
#ifdef REG_PARM_STACK_SPACE
1916
  /* Define the boundary of the register parm stack space that needs to be
1917
     saved, if any.  */
1918
  int low_to_save, high_to_save;
1919
  rtx save_area = 0;             /* Place that it is saved */
1920
#endif

  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  char *stack_usage_map_buf = NULL;

  int old_stack_allocated;

  /* State variables to track stack modifications.  */
  rtx old_stack_level = 0;
  int old_stack_arg_under_construction = 0;
  int old_pending_adj = 0;
  int old_inhibit_defer_pop = inhibit_defer_pop;

  /* Some stack pointer alterations we make are performed via
     allocate_dynamic_stack_space.  This modifies the stack_pointer_delta,
     which we then also need to save/restore along the way.  */
  int old_stack_pointer_delta = 0;

  rtx call_fusage;
  tree p = TREE_OPERAND (exp, 0);
  tree addr = TREE_OPERAND (exp, 0);
  int i;
  /* The alignment of the stack, in bits.  */
  unsigned HOST_WIDE_INT preferred_stack_boundary;
  /* The alignment of the stack, in bytes.  */
  unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
  /* The static chain value to use for this call.  */
  rtx static_chain_value;
  /* See if this is a "nothrow" function call.  */
  if (TREE_NOTHROW (exp))
    flags |= ECF_NOTHROW;

  /* See if we can find a DECL-node for the actual function, and get the
     function attributes (flags) from the function decl or type node.  */
  fndecl = get_callee_fndecl (exp);
  if (fndecl)
    {
      fntype = TREE_TYPE (fndecl);
      flags |= flags_from_decl_or_type (fndecl);
    }
  else
    {
      fntype = TREE_TYPE (TREE_TYPE (p));
      flags |= flags_from_decl_or_type (fntype);
    }

  struct_value = targetm.calls.struct_value_rtx (fntype, 0);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    warning (OPT_Waggregate_return, "function call has aggregate value");

  /* If the result of a pure or const function call is ignored (or void),
     and none of its arguments are volatile, we can avoid expanding the
     call and just evaluate the arguments for side-effects.  */
  if ((flags & (ECF_CONST | ECF_PURE))
      && (ignore || target == const0_rtx
          || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
    {
      bool volatilep = false;
      tree arg;

      for (arg = actparms; arg; arg = TREE_CHAIN (arg))
        if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
          {
            volatilep = true;
            break;
          }

      if (! volatilep)
        {
          for (arg = actparms; arg; arg = TREE_CHAIN (arg))
            expand_expr (TREE_VALUE (arg), const0_rtx,
                         VOIDmode, EXPAND_NORMAL);
          return const0_rtx;
        }
    }
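
  /* For instance, a statement such as "(void) strlen (s);", with strlen
     known pure and the result unused, reaches the path above: s is still
     evaluated for side effects, but no call insn is emitted.  (Example
     chosen purely for illustration.)  */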

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif

#ifndef OUTGOING_REG_PARM_STACK_SPACE
  if (reg_parm_stack_space > 0 && PUSH_ARGS)
    must_preallocate = 1;
#endif

  /* Set up a place to return a structure.  */

  /* Cater to broken compilers.  */
  if (aggregate_value_p (exp, fndecl))
    {
      /* This call returns a big structure.  */
      flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

#ifdef PCC_STATIC_STRUCT_RETURN
      {
        pcc_struct_value = 1;
      }
#else /* not PCC_STATIC_STRUCT_RETURN */
      {
        struct_value_size = int_size_in_bytes (TREE_TYPE (exp));

        if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
          structure_value_addr = XEXP (target, 0);
        else
          {
            /* For variable-sized objects, we must be called with a target
               specified.  If we were to allocate space on the stack here,
               we would have no way of knowing when to free it.  */
            rtx d = assign_temp (TREE_TYPE (exp), 0, 1, 1);

            mark_temp_addr_taken (d);
            structure_value_addr = XEXP (d, 0);
            target = 0;
          }
      }
#endif /* not PCC_STATIC_STRUCT_RETURN */
    }

  /* Figure out the amount to which the stack should be aligned.  */
  preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  if (fndecl)
    {
      struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
      if (i && i->preferred_incoming_stack_boundary)
        preferred_stack_boundary = i->preferred_incoming_stack_boundary;
    }

  /* Operand 0 is a pointer-to-function; get the type of the function.  */
  funtype = TREE_TYPE (addr);
  gcc_assert (POINTER_TYPE_P (funtype));
  funtype = TREE_TYPE (funtype);

  /* Munge the tree to split complex arguments into their imaginary
     and real parts.  */
  if (targetm.calls.split_complex_arg)
    {
      type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
      actparms = split_complex_values (actparms);
    }
  else
    type_arg_types = TYPE_ARG_TYPES (funtype);

  if (flags & ECF_MAY_BE_ALLOCA)
    current_function_calls_alloca = 1;

  /* If struct_value_rtx is 0, it means pass the address
     as if it were an extra parameter.  */
  if (structure_value_addr && struct_value == 0)
    {
      /* If structure_value_addr is a REG other than
         virtual_outgoing_args_rtx, we can always use it.  If it
         is not a REG, we must always copy it into a register.
         If it is virtual_outgoing_args_rtx, we must copy it to another
         register in some cases.  */
      rtx temp = (!REG_P (structure_value_addr)
                  || (ACCUMULATE_OUTGOING_ARGS
                      && stack_arg_under_construction
                      && structure_value_addr == virtual_outgoing_args_rtx)
                  ? copy_addr_to_reg (convert_memory_address
                                      (Pmode, structure_value_addr))
                  : structure_value_addr);

      actparms
        = tree_cons (error_mark_node,
                     make_tree (build_pointer_type (TREE_TYPE (funtype)),
                                temp),
                     actparms);
      structure_value_addr_parm = 1;
    }

  /* Count the arguments and set NUM_ACTUALS.  */
  for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
    num_actuals++;

  /* Compute the number of named args.
     First, do a raw count of the args for INIT_CUMULATIVE_ARGS.  */

  if (type_arg_types != 0)
    n_named_args
      = (list_length (type_arg_types)
         /* Count the struct value address, if it is passed as a parm.  */
         + structure_value_addr_parm);
  else
    /* If we know nothing, treat all args as named.  */
    n_named_args = num_actuals;

  /* Start updating where the next arg would go.

     On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  The fourth argument in
     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
     or not.  */
  INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);

  /* Now possibly adjust the number of named args.
     Normally, don't include the last named arg if anonymous args follow.
     We do include the last named arg if
     targetm.calls.strict_argument_naming() returns nonzero.
     (If no anonymous args follow, the result of list_length is actually
     one too large.  This is harmless.)

     If targetm.calls.pretend_outgoing_varargs_named() returns
     nonzero, and targetm.calls.strict_argument_naming() returns zero,
     this machine will be able to place unnamed args that were passed
     in registers into the stack.  So treat all args as named.  This
     allows the insns emitting for a specific argument list to be
     independent of the function declaration.

     If targetm.calls.pretend_outgoing_varargs_named() returns zero,
     we do not have any reliable way to pass unnamed args in
     registers, so we must force them into memory.  */

  if (type_arg_types != 0
      && targetm.calls.strict_argument_naming (&args_so_far))
    ;
  else if (type_arg_types != 0
           && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
    /* Don't include the last named arg.  */
    --n_named_args;
  else
    /* Treat all args as named.  */
    n_named_args = num_actuals;
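
  /* As an illustration: for a call to "int printf (const char *, ...)",
     type_arg_types lists the single named parameter, so the raw count
     above is 1; whether the remaining actual arguments count as named
     then depends on the two target hooks just queried.  */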

  /* Make a vector to hold all the information about each arg.  */
  args = alloca (num_actuals * sizeof (struct arg_data));
  memset (args, 0, num_actuals * sizeof (struct arg_data));

  /* Build up entries in the ARGS array, compute the size of the
     arguments into ARGS_SIZE, etc.  */
  initialize_argument_information (num_actuals, args, &args_size,
                                   n_named_args, actparms, fndecl,
                                   &args_so_far, reg_parm_stack_space,
                                   &old_stack_level, &old_pending_adj,
                                   &must_preallocate, &flags,
                                   &try_tail_call, CALL_FROM_THUNK_P (exp));

  if (args_size.var)
    {
      /* If this function requires a variable-sized argument list, don't
         try to make a cse'able block for this call.  We may be able to
         do this eventually, but it is too complicated to keep track of
         what insns go in the cse'able block and which don't.  */

      flags &= ~ECF_LIBCALL_BLOCK;
      must_preallocate = 1;
    }

  /* Now make the final decision about preallocating stack space.  */
  must_preallocate = finalize_must_preallocate (must_preallocate,
                                                num_actuals, args,
                                                &args_size);

  /* If the structure value address will reference the stack pointer, we
     must stabilize it.  We don't need to do this if we know that we are
     not going to adjust the stack pointer in processing this call.  */

  if (structure_value_addr
      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
          || reg_mentioned_p (virtual_outgoing_args_rtx,
                              structure_value_addr))
      && (args_size.var
          || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
    structure_value_addr = copy_to_reg (structure_value_addr);

  /* Tail calls can make things harder to debug, and we've traditionally
     pushed these optimizations into -O2.  Don't try if we're already
     expanding a call, as that means we're an argument.  Don't try if
     there are cleanups, as we know there's code to follow the call.  */

  if (currently_expanding_call++ != 0
      || !flag_optimize_sibling_calls
      || args_size.var
      || lookup_stmt_eh_region (exp) >= 0)
    try_tail_call = 0;

  /* Other reasons the tail call optimization can fail.  */
  if (
#ifdef HAVE_sibcall_epilogue
      !HAVE_sibcall_epilogue
#else
      1
#endif
      || !try_tail_call
      /* Doing sibling call optimization needs some work, since
         structure_value_addr can be allocated on the stack.
         It does not seem worth the effort since few optimizable
         sibling calls will return a structure.  */
      || structure_value_addr != NULL_RTX
      /* Check whether the target is able to optimize the call
         into a sibcall.  */
      || !targetm.function_ok_for_sibcall (fndecl, exp)
      /* Functions that do not return exactly once may not be sibcall
         optimized.  */
      || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
      || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
      /* If the called function is nested in the current one, it might access
         some of the caller's arguments, but could clobber them beforehand if
         the argument areas are shared.  */
      || (fndecl && decl_function_context (fndecl) == current_function_decl)
      /* If this function requires more stack slots than the current
         function, we cannot change it into a sibling call.
         current_function_pretend_args_size is not part of the
         stack allocated by our caller.  */
      || args_size.constant > (current_function_args_size
                               - current_function_pretend_args_size)
      /* If the callee pops its own arguments, then it must pop exactly
         the same number of arguments as the current function.  */
      || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
          != RETURN_POPS_ARGS (current_function_decl,
                               TREE_TYPE (current_function_decl),
                               current_function_args_size))
      || !lang_hooks.decls.ok_for_sibcall (fndecl))
    try_tail_call = 0;
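
  /* E.g. (numbers invented for illustration): a caller that received 16
     bytes of stack arguments cannot sibcall a callee needing 24, since
     the callee's arguments would overrun the frame our own caller
     allocated.  */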

  /* Ensure the current function's preferred stack boundary is at least
     what we need.  We don't have to increase alignment for recursive
     functions.  */
  if (cfun->preferred_stack_boundary < preferred_stack_boundary
      && fndecl != current_function_decl)
    cfun->preferred_stack_boundary = preferred_stack_boundary;
  if (fndecl == current_function_decl)
    cfun->recursive_call_emit = true;

  preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;

  /* We want to make two insn chains; one for a sibling call, the other
     for a normal call.  We will select one of the two chains after
     initial RTL generation is complete.  */
  for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
    {
      int sibcall_failure = 0;
      /* We want to emit any pending stack adjustments before the tail
         recursion "call".  That way we know any adjustment after the tail
         recursion call can be ignored if we indeed use the tail
         call expansion.  */
      int save_pending_stack_adjust = 0;
      int save_stack_pointer_delta = 0;
      rtx insns;
      rtx before_call, next_arg_reg;

      if (pass == 0)
        {
          /* State variables we need to save and restore between
             iterations.  */
          save_pending_stack_adjust = pending_stack_adjust;
          save_stack_pointer_delta = stack_pointer_delta;
        }
      if (pass)
        flags &= ~ECF_SIBCALL;
      else
        flags |= ECF_SIBCALL;

      /* Other state variables that we must reinitialize each time
         through the loop (that are not initialized by the loop itself).  */
      argblock = 0;
      call_fusage = 0;

      /* Start a new sequence for the normal call case.

         From this point on, if the sibling call fails, we want to set
         sibcall_failure instead of continuing the loop.  */
      start_sequence ();

      /* Don't let pending stack adjusts add up to too much.
         Also, do all pending adjustments now if there is any chance
         this might be a call to alloca or if we are expanding a sibling
         call sequence or if we are calling a function that is to return
         with stack pointer depressed.
         Also do the adjustments before a throwing call, otherwise
         exception handling can fail; PR 19225.  */
      if (pending_stack_adjust >= 32
          || (pending_stack_adjust > 0
              && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
          || (pending_stack_adjust > 0
              && flag_exceptions && !(flags & ECF_NOTHROW))
          || pass == 0)
        do_pending_stack_adjust ();

      /* When calling a const function, we must pop the stack args right away,
         so that the pop is deleted or moved with the call.  */
      if (pass && (flags & ECF_LIBCALL_BLOCK))
        NO_DEFER_POP;

      /* Precompute any arguments as needed.  */
      if (pass)
        precompute_arguments (flags, num_actuals, args);

      /* Now we are about to start emitting insns that can be deleted
         if a libcall is deleted.  */
      if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
        start_sequence ();

      if (pass == 0 && cfun->stack_protect_guard)
        stack_protect_epilogue ();

      adjusted_args_size = args_size;
      /* Compute the actual size of the argument block required.  The variable
         and constant sizes must be combined, the size may have to be rounded,
         and there may be a minimum required size.  When generating a sibcall
         pattern, do not round up, since we'll be re-using whatever space our
         caller provided.  */
      unadjusted_args_size
        = compute_argument_block_size (reg_parm_stack_space,
                                       &adjusted_args_size,
                                       (pass == 0 ? 0
                                        : preferred_stack_boundary));

      old_stack_allocated = stack_pointer_delta - pending_stack_adjust;

      /* The argument block when performing a sibling call is the
         incoming argument block.  */
      if (pass == 0)
        {
          argblock = virtual_incoming_args_rtx;
          argblock
#ifdef STACK_GROWS_DOWNWARD
            = plus_constant (argblock, current_function_pretend_args_size);
#else
            = plus_constant (argblock, -current_function_pretend_args_size);
#endif
          stored_args_map = sbitmap_alloc (args_size.constant);
          sbitmap_zero (stored_args_map);
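          /* Each bit of stored_args_map covers one byte of the incoming
             argument area; check_sibcall_argument_overlap sets the bits
             as tail-call arguments are stored into that area.  */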
        }

      /* If we have no actual push instructions, or shouldn't use them,
         make space for all args right now.  */
      else if (adjusted_args_size.var != 0)
        {
          if (old_stack_level == 0)
            {
              emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
              old_stack_pointer_delta = stack_pointer_delta;
              old_pending_adj = pending_stack_adjust;
              pending_stack_adjust = 0;
              /* stack_arg_under_construction says whether a stack arg is
                 being constructed at the old stack level.  Pushing the stack
                 gets a clean outgoing argument block.  */
              old_stack_arg_under_construction = stack_arg_under_construction;
              stack_arg_under_construction = 0;
            }
          argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
        }
      else
        {
          /* Note that we must go through the motions of allocating an argument
             block even if the size is zero because we may be storing args
             in the area reserved for register arguments, which may be part of
             the stack frame.  */

          int needed = adjusted_args_size.constant;

          /* Store the maximum argument space used.  It will be pushed by
             the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
             checking).  */

          if (needed > current_function_outgoing_args_size)
            current_function_outgoing_args_size = needed;

          if (must_preallocate)
            {
              if (ACCUMULATE_OUTGOING_ARGS)
                {
                  /* Since the stack pointer will never be pushed, it is
                     possible for the evaluation of a parm to clobber
                     something we have already written to the stack.
                     Since most function calls on RISC machines do not use
                     the stack, this is uncommon, but must work correctly.

                     Therefore, we save any area of the stack that was already
                     written and that we are using.  Here we set up to do this
                     by making a new stack usage map from the old one.  The
                     actual save will be done by store_one_arg.

                     Another approach might be to try to reorder the argument
                     evaluations to avoid this conflicting stack usage.  */

#ifndef OUTGOING_REG_PARM_STACK_SPACE
                  /* Since we will be writing into the entire argument area,
                     the map must be allocated for its entire size, not just
                     the part that is the responsibility of the caller.  */
                  needed += reg_parm_stack_space;
#endif

#ifdef ARGS_GROW_DOWNWARD
                  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                                     needed + 1);
#else
                  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                                     needed);
#endif
                  if (stack_usage_map_buf)
                    free (stack_usage_map_buf);
                  stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
                  stack_usage_map = stack_usage_map_buf;

                  if (initial_highest_arg_in_use)
                    memcpy (stack_usage_map, initial_stack_usage_map,
                            initial_highest_arg_in_use);

                  if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
                    memset (&stack_usage_map[initial_highest_arg_in_use], 0,
                           (highest_outgoing_arg_in_use
                            - initial_highest_arg_in_use));
                  needed = 0;

                  /* The address of the outgoing argument list must not be
                     copied to a register here, because argblock would be left
                     pointing to the wrong place after the call to
                     allocate_dynamic_stack_space below.  */

                  argblock = virtual_outgoing_args_rtx;
                }
              else
                {
                  if (inhibit_defer_pop == 0)
                    {
                      /* Try to reuse some or all of the pending_stack_adjust
                         to get this space.  */
                      needed
                        = (combine_pending_stack_adjustment_and_call
                           (unadjusted_args_size,
                            &adjusted_args_size,
                            preferred_unit_stack_boundary));

                      /* combine_pending_stack_adjustment_and_call computes
                         an adjustment before the arguments are allocated.
                         Account for them and see whether or not the stack
                         needs to go up or down.  */
                      needed = unadjusted_args_size - needed;

                      if (needed < 0)
                        {
                          /* We're releasing stack space.  */
                          /* ??? We can avoid any adjustment at all if we're
                             already aligned.  FIXME.  */
                          pending_stack_adjust = -needed;
                          do_pending_stack_adjust ();
                          needed = 0;
                        }
                      else
                        /* We need to allocate space.  We'll do that in
                           push_block below.  */
                        pending_stack_adjust = 0;
                    }

                  /* Special case this because the overhead of `push_block' in
                     this case is non-trivial.  */
                  if (needed == 0)
                    argblock = virtual_outgoing_args_rtx;
                  else
                    {
                      argblock = push_block (GEN_INT (needed), 0, 0);
#ifdef ARGS_GROW_DOWNWARD
                      argblock = plus_constant (argblock, needed);
#endif
                    }

                  /* We only really need to call `copy_to_reg' in the case
                     where push insns are going to be used to pass ARGBLOCK
                     to a function call in ARGS.  In that case, the stack
                     pointer changes value from the allocation point to the
                     call point, and hence the value of
                     VIRTUAL_OUTGOING_ARGS_RTX changes as well.  But might
                     as well always do it.  */
                  argblock = copy_to_reg (argblock);
                }
            }
        }

      if (ACCUMULATE_OUTGOING_ARGS)
        {
          /* The save/restore code in store_one_arg handles all
             cases except one: a constructor call (including a C
             function returning a BLKmode struct) to initialize
             an argument.  */
          if (stack_arg_under_construction)
            {
#ifndef OUTGOING_REG_PARM_STACK_SPACE
              rtx push_size = GEN_INT (reg_parm_stack_space
                                       + adjusted_args_size.constant);
#else
              rtx push_size = GEN_INT (adjusted_args_size.constant);
#endif
              if (old_stack_level == 0)
                {
                  emit_stack_save (SAVE_BLOCK, &old_stack_level,
                                   NULL_RTX);
                  old_stack_pointer_delta = stack_pointer_delta;
                  old_pending_adj = pending_stack_adjust;
                  pending_stack_adjust = 0;
                  /* stack_arg_under_construction says whether a stack
                     arg is being constructed at the old stack level.
                     Pushing the stack gets a clean outgoing argument
                     block.  */
                  old_stack_arg_under_construction
                    = stack_arg_under_construction;
                  stack_arg_under_construction = 0;
                  /* Make a new map for the new argument list.  */
                  if (stack_usage_map_buf)
                    free (stack_usage_map_buf);
                  stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
                  stack_usage_map = stack_usage_map_buf;
                  memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
                  highest_outgoing_arg_in_use = 0;
                }
              allocate_dynamic_stack_space (push_size, NULL_RTX,
                                            BITS_PER_UNIT);
            }

          /* If argument evaluation might modify the stack pointer,
             copy the address of the argument list to a register.  */
          for (i = 0; i < num_actuals; i++)
            if (args[i].pass_on_stack)
              {
                argblock = copy_addr_to_reg (argblock);
                break;
              }
        }

      compute_argument_addresses (args, argblock, num_actuals);

      /* If we push args individually in reverse order, perform stack alignment
         before the first push (the last arg).  */
      if (PUSH_ARGS_REVERSED && argblock == 0
          && adjusted_args_size.constant != unadjusted_args_size)
        {
          /* When the stack adjustment is pending, we get better code
             by combining the adjustments.  */
          if (pending_stack_adjust
              && ! (flags & ECF_LIBCALL_BLOCK)
              && ! inhibit_defer_pop)
            {
              pending_stack_adjust
                = (combine_pending_stack_adjustment_and_call
                   (unadjusted_args_size,
                    &adjusted_args_size,
                    preferred_unit_stack_boundary));
              do_pending_stack_adjust ();
            }
          else if (argblock == 0)
            anti_adjust_stack (GEN_INT (adjusted_args_size.constant
                                        - unadjusted_args_size));
        }
      /* Now that the stack is properly aligned, pops can't safely
         be deferred during the evaluation of the arguments.  */
      NO_DEFER_POP;

      funexp = rtx_for_function_call (fndecl, addr);

      /* Figure out the register where the value, if any, will come back.  */
      valreg = 0;
      if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
          && ! structure_value_addr)
        {
          if (pcc_struct_value)
            valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
                                          fndecl, NULL, (pass == 0));
          else
            valreg = hard_function_value (TREE_TYPE (exp), fndecl, fntype,
                                          (pass == 0));
        }
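
      /* Under the PCC convention the callee returns a pointer to the
         aggregate, so VALREG above is computed for that pointer type;
         otherwise it is the hard register the return-value ABI assigns
         to TREE_TYPE (exp).  */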

      /* Precompute all register parameters.  It isn't safe to compute anything
         once we have started filling any specific hard regs.  */
      precompute_register_parameters (num_actuals, args, &reg_parm_seen);

      if (TREE_OPERAND (exp, 2))
        static_chain_value = expand_normal (TREE_OPERAND (exp, 2));
      else
        static_chain_value = 0;

#ifdef REG_PARM_STACK_SPACE
      /* Save the fixed argument area if it's part of the caller's frame and
         is clobbered by argument setup for this call.  */
      if (ACCUMULATE_OUTGOING_ARGS && pass)
        save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
                                              &low_to_save, &high_to_save);
#endif

      /* Now store (and compute if necessary) all non-register parms.
         These come before register parms, since they can require block-moves,
         which could clobber the registers used for register parms.
         Parms which have partial registers are not stored here,
         but we do preallocate space here if they want that.  */

      for (i = 0; i < num_actuals; i++)
        if (args[i].reg == 0 || args[i].pass_on_stack)
          {
            rtx before_arg = get_last_insn ();

            if (store_one_arg (&args[i], argblock, flags,
                               adjusted_args_size.var != 0,
                               reg_parm_stack_space)
                || (pass == 0
                    && check_sibcall_argument_overlap (before_arg,
                                                       &args[i], 1)))
              sibcall_failure = 1;

            if (flags & ECF_CONST
                && args[i].stack
                && args[i].value == args[i].stack)
              call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                               gen_rtx_USE (VOIDmode,
                                                            args[i].value),
                                               call_fusage);
          }

      /* If we have a parm that is passed in registers but not in memory
         and whose alignment does not permit a direct copy into registers,
         make a group of pseudos that correspond to each register that we
         will later fill.  */
      if (STRICT_ALIGNMENT)
        store_unaligned_arguments_into_pseudos (args, num_actuals);

      /* Now store any partially-in-registers parm.
         This is the last place a block-move can happen.  */
      if (reg_parm_seen)
        for (i = 0; i < num_actuals; i++)
          if (args[i].partial != 0 && ! args[i].pass_on_stack)
            {
              rtx before_arg = get_last_insn ();

              if (store_one_arg (&args[i], argblock, flags,
                                 adjusted_args_size.var != 0,
                                 reg_parm_stack_space)
                  || (pass == 0
                      && check_sibcall_argument_overlap (before_arg,
                                                         &args[i], 1)))
                sibcall_failure = 1;
            }

      /* If we pushed args in forward order, perform stack alignment
         after pushing the last arg.  */
      if (!PUSH_ARGS_REVERSED && argblock == 0)
        anti_adjust_stack (GEN_INT (adjusted_args_size.constant
                                    - unadjusted_args_size));

      /* If register arguments require space on the stack and stack space
         was not preallocated, allocate stack space here for arguments
         passed in registers.  */
#ifdef OUTGOING_REG_PARM_STACK_SPACE
      if (!ACCUMULATE_OUTGOING_ARGS
          && must_preallocate == 0 && reg_parm_stack_space > 0)
        anti_adjust_stack (GEN_INT (reg_parm_stack_space));
#endif
2669
 
2670
      /* Pass the function the address in which to return a
2671
         structure value.  */
2672
      if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2673
        {
2674
          structure_value_addr
2675
            = convert_memory_address (Pmode, structure_value_addr);
2676
          emit_move_insn (struct_value,
2677
                          force_reg (Pmode,
2678
                                     force_operand (structure_value_addr,
2679
                                                    NULL_RTX)));
2680
 
2681
          if (REG_P (struct_value))
2682
            use_reg (&call_fusage, struct_value);
2683
        }
2684
 
2685
      funexp = prepare_call_address (funexp, static_chain_value,
2686
                                     &call_fusage, reg_parm_seen, pass == 0);
2687
 
2688
      load_register_parameters (args, num_actuals, &call_fusage, flags,
2689
                                pass == 0, &sibcall_failure);
2690
 
2691
      /* Save a pointer to the last insn before the call, so that we can
2692
         later safely search backwards to find the CALL_INSN.  */
2693
      before_call = get_last_insn ();
2694
 
2695
      /* Set up next argument register.  For sibling calls on machines
2696
         with register windows this should be the incoming register.  */
2697
#ifdef FUNCTION_INCOMING_ARG
2698
      if (pass == 0)
2699
        next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2700
                                              void_type_node, 1);
2701
      else
2702
#endif
2703
        next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2704
                                     void_type_node, 1);
2705
 
2706
      /* All arguments and registers used for the call must be set up by
2707
         now!  */
2708
 
2709
      /* Stack must be properly aligned now.  */
2710
      gcc_assert (!pass
2711
                  || !(stack_pointer_delta % preferred_unit_stack_boundary));
2712
 
2713
      /* Generate the actual call instruction.  */
2714
      emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2715
                   adjusted_args_size.constant, struct_value_size,
2716
                   next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2717
                   flags, & args_so_far);
2718
 
2719
      /* If a non-BLKmode value is returned at the most significant end
2720
         of a register, shift the register right by the appropriate amount
2721
         and update VALREG accordingly.  BLKmode values are handled by the
2722
         group load/store machinery below.  */
2723
      if (!structure_value_addr
2724
          && !pcc_struct_value
2725
          && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2726
          && targetm.calls.return_in_msb (TREE_TYPE (exp)))
2727
        {
2728
          if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
2729
            sibcall_failure = 1;
2730
          valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
2731
        }
2732
 
2733
      /* If call is cse'able, make appropriate pair of reg-notes around it.
2734
         Test valreg so we don't crash; may safely ignore `const'
2735
         if return type is void.  Disable for PARALLEL return values, because
2736
         we have no way to move such values into a pseudo register.  */
2737
      if (pass && (flags & ECF_LIBCALL_BLOCK))
2738
        {
2739
          rtx insns;
2740
          rtx insn;
2741
          bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
2742
 
2743
          insns = get_insns ();
2744
 
2745
          /* Expansion of block moves possibly introduced a loop that must
             not appear inside the libcall block.  */
          for (insn = insns; insn; insn = NEXT_INSN (insn))
            if (JUMP_P (insn))
              failed = true;

          if (failed)
            {
              end_sequence ();
              emit_insn (insns);
            }
          else
            {
              rtx note = 0;
              rtx temp = gen_reg_rtx (GET_MODE (valreg));

              /* Mark the return value as a pointer if needed.  */
              if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
                mark_reg_pointer (temp,
                                  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));

              end_sequence ();
              if (flag_unsafe_math_optimizations
                  && fndecl
                  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
                      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
                      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
                note = gen_rtx_fmt_e (SQRT,
                                      GET_MODE (temp),
                                      args[0].initial_value);
              else
                {
                  /* Construct an "equal form" for the value which
                     mentions all the arguments in order as well as
                     the function name.  */
                  for (i = 0; i < num_actuals; i++)
                    note = gen_rtx_EXPR_LIST (VOIDmode,
                                              args[i].initial_value, note);
                  note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
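                  /* For illustration: for a call `f (a, b)' the note built
                     above has the shape
                       (expr_list f (expr_list b (expr_list a nil))),
                     i.e. the function expression followed by the argument
                     values.  */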

                  if (flags & ECF_PURE)
                    note = gen_rtx_EXPR_LIST (VOIDmode,
                        gen_rtx_USE (VOIDmode,
                                     gen_rtx_MEM (BLKmode,
                                                  gen_rtx_SCRATCH (VOIDmode))),
                        note);
                }
              emit_libcall_block (insns, temp, valreg, note);

              valreg = temp;
            }
        }
      else if (pass && (flags & ECF_MALLOC))
        {
          rtx temp = gen_reg_rtx (GET_MODE (valreg));
          rtx last, insns;

          /* The return value from a malloc-like function is a pointer.  */
          if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
            mark_reg_pointer (temp, BIGGEST_ALIGNMENT);

          emit_move_insn (temp, valreg);

          /* The return value from a malloc-like function cannot alias
             anything else.  */
          last = get_last_insn ();
          REG_NOTES (last) =
            gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));

          /* Write out the sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          valreg = temp;
        }

      /* For calls to `setjmp', etc., inform flow.c it should complain
         if nonvolatile values are live.  For functions that cannot return,
         inform flow that control does not fall through.  */

      if ((flags & ECF_NORETURN) || pass == 0)
        {
          /* The barrier must be emitted
             immediately after the CALL_INSN.  Some ports emit more
             than just a CALL_INSN above, so we must search for it here.  */

          rtx last = get_last_insn ();
          while (!CALL_P (last))
            {
              last = PREV_INSN (last);
              /* There was no CALL_INSN?  */
              gcc_assert (last != before_call);
            }

          emit_barrier_after (last);

          /* Stack adjustments after a noreturn call are dead code.
             However when NO_DEFER_POP is in effect, we must preserve
             stack_pointer_delta.  */
          if (inhibit_defer_pop == 0)
            {
              stack_pointer_delta = old_stack_allocated;
              pending_stack_adjust = 0;
            }
        }

      /* If value type not void, return an rtx for the value.  */

      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
          || ignore)
        target = const0_rtx;
      else if (structure_value_addr)
        {
          if (target == 0 || !MEM_P (target))
            {
              target
                = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
                               memory_address (TYPE_MODE (TREE_TYPE (exp)),
                                               structure_value_addr));
              set_mem_attributes (target, exp, 1);
            }
        }
      else if (pcc_struct_value)
        {
          /* This is the special C++ case where we need to
             know what the true target was.  We take care to
             never use this value more than once in one expression.  */
          target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
                                copy_to_reg (valreg));
          set_mem_attributes (target, exp, 1);
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (valreg) == PARALLEL)
        {
          if (target == 0)
            {
              /* This will only be assigned once, so it can be readonly.  */
              tree nt = build_qualified_type (TREE_TYPE (exp),
                                              (TYPE_QUALS (TREE_TYPE (exp))
                                               | TYPE_QUAL_CONST));

              target = assign_temp (nt, 0, 1, 1);
            }

          if (! rtx_equal_p (target, valreg))
            emit_group_store (target, valreg, TREE_TYPE (exp),
                              int_size_in_bytes (TREE_TYPE (exp)));

          /* We cannot support sibling calls for this case.  */
          sibcall_failure = 1;
        }
      else if (target
               && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
               && GET_MODE (target) == GET_MODE (valreg))
        {
          bool may_overlap = false;

          /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
             reg to a plain register.  */
          if (REG_P (valreg)
              && HARD_REGISTER_P (valreg)
              && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (valreg)))
              && !(REG_P (target) && !HARD_REGISTER_P (target)))
            valreg = copy_to_reg (valreg);

          /* If TARGET is a MEM in the argument area, and we have
             saved part of the argument area, then we can't store
             directly into TARGET as it may get overwritten when we
             restore the argument save area below.  Don't work too
             hard though and simply force TARGET to a register if it
             is a MEM; the optimizer is quite likely to sort it out.  */
          if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
            for (i = 0; i < num_actuals; i++)
              if (args[i].save_area)
                {
                  may_overlap = true;
                  break;
                }

          if (may_overlap)
            target = copy_to_reg (valreg);
          else
            {
              /* TARGET and VALREG cannot be equal at this point
                 because the latter would not have
                 REG_FUNCTION_VALUE_P true, while the former would if
                 it were referring to the same register.

                 If they refer to the same register, this move will be
                 a no-op, except when function inlining is being
                 done.  */
              emit_move_insn (target, valreg);

              /* If we are setting a MEM, this code must be executed.
                 Since it is emitted after the call insn, sibcall
                 optimization cannot be performed in that case.  */
              if (MEM_P (target))
                sibcall_failure = 1;
            }
        }
      else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
        {
          target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));

          /* We cannot support sibling calls for this case.  */
          sibcall_failure = 1;
        }
      else
        target = copy_to_reg (valreg);

      if (targetm.calls.promote_function_return(funtype))
        {
          /* If we promoted this return value, make the proper SUBREG.
             TARGET might be const0_rtx here, so be careful.  */
          if (REG_P (target)
              && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
              && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
            {
              tree type = TREE_TYPE (exp);
              int unsignedp = TYPE_UNSIGNED (type);
              int offset = 0;
              enum machine_mode pmode;

              pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
              /* If we don't promote as expected, something is wrong.  */
              gcc_assert (GET_MODE (target) == pmode);

              if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
                  && (GET_MODE_SIZE (GET_MODE (target))
                      > GET_MODE_SIZE (TYPE_MODE (type))))
                {
                  offset = GET_MODE_SIZE (GET_MODE (target))
                    - GET_MODE_SIZE (TYPE_MODE (type));
                  if (! BYTES_BIG_ENDIAN)
                    offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
                  else if (! WORDS_BIG_ENDIAN)
                    offset %= UNITS_PER_WORD;
                }
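              /* Worked example: on a fully big-endian 64-bit target, an
                 SImode value promoted to DImode lives in the high-order
                 half of the register, so OFFSET becomes 8 - 4 = 4 and the
                 SUBREG below picks out the significant SImode part.  */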
              target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
              SUBREG_PROMOTED_VAR_P (target) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
            }
        }

      /* If size of args is variable or this was a constructor call for a stack
         argument, restore saved stack-pointer value.  */

      if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
        {
          emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
          stack_pointer_delta = old_stack_pointer_delta;
          pending_stack_adjust = old_pending_adj;
          old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
          stack_arg_under_construction = old_stack_arg_under_construction;
          highest_outgoing_arg_in_use = initial_highest_arg_in_use;
          stack_usage_map = initial_stack_usage_map;
          sibcall_failure = 1;
        }
      else if (ACCUMULATE_OUTGOING_ARGS && pass)
        {
#ifdef REG_PARM_STACK_SPACE
          if (save_area)
            restore_fixed_argument_area (save_area, argblock,
                                         high_to_save, low_to_save);
#endif

          /* If we saved any argument areas, restore them.  */
          for (i = 0; i < num_actuals; i++)
            if (args[i].save_area)
              {
                enum machine_mode save_mode = GET_MODE (args[i].save_area);
                rtx stack_area
                  = gen_rtx_MEM (save_mode,
                                 memory_address (save_mode,
                                                 XEXP (args[i].stack_slot, 0)));

                if (save_mode != BLKmode)
                  emit_move_insn (stack_area, args[i].save_area);
                else
                  emit_block_move (stack_area, args[i].save_area,
                                   GEN_INT (args[i].locate.size.constant),
                                   BLOCK_OP_CALL_PARM);
              }

          highest_outgoing_arg_in_use = initial_highest_arg_in_use;
          stack_usage_map = initial_stack_usage_map;
        }

      /* If this was alloca, record the new stack level for nonlocal gotos.
         Check for the handler slots since we might not have a save area
         for non-local gotos.  */

      if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
        update_nonlocal_goto_save_area ();

      /* Free up storage we no longer need.  */
      for (i = 0; i < num_actuals; ++i)
        if (args[i].aligned_regs)
          free (args[i].aligned_regs);

      insns = get_insns ();
      end_sequence ();

      if (pass == 0)
        {
          tail_call_insns = insns;

          /* Restore the pending stack adjustment now that we have
             finished generating the sibling call sequence.  */

          pending_stack_adjust = save_pending_stack_adjust;
          stack_pointer_delta = save_stack_pointer_delta;

          /* Prepare arg structure for next iteration.  */
          for (i = 0; i < num_actuals; i++)
            {
              args[i].value = 0;
              args[i].aligned_regs = 0;
              args[i].stack = 0;
            }

          sbitmap_free (stored_args_map);
        }
      else
        {
          normal_call_insns = insns;

          /* Verify that we've deallocated all the stack we used.  */
          gcc_assert ((flags & ECF_NORETURN)
                      || (old_stack_allocated
                          == stack_pointer_delta - pending_stack_adjust));
        }

      /* If something prevents making this a sibling call,
         zero out the sequence.  */
      if (sibcall_failure)
        tail_call_insns = NULL_RTX;
      else
        break;
    }

  /* If tail call production succeeded, we need to remove REG_EQUIV notes on
     arguments too, as argument area is now clobbered by the call.  */
  if (tail_call_insns)
    {
      emit_insn (tail_call_insns);
      cfun->tail_call_emit = true;
    }
  else
    emit_insn (normal_call_insns);

  currently_expanding_call--;

  /* If this function returns with the stack pointer depressed, ensure
     this block saves and restores the stack pointer, show it was
     changed, and adjust for any outgoing arg space.  */
  if (flags & ECF_SP_DEPRESSED)
    {
      clear_pending_stack_adjust ();
      emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
      emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
    }

  if (stack_usage_map_buf)
    free (stack_usage_map_buf);

  return target;
}

/* A sibling call sequence invalidates any REG_EQUIV notes made for
   this function's incoming arguments.

   At the start of RTL generation we know the only REG_EQUIV notes
   in the rtl chain are those for incoming arguments, so we can look
   for REG_EQUIV notes between the start of the function and the
   NOTE_INSN_FUNCTION_BEG.

   This is (slight) overkill.  We could keep track of the highest
   argument we clobber and be more selective in removing notes, but it
   does not seem to be worth the effort.  */

void
fixup_tail_calls (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* There are never REG_EQUIV notes for the incoming arguments
         after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it.  */
      if (NOTE_P (insn)
          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
        break;

      while (1)
        {
          rtx note = find_reg_note (insn, REG_EQUIV, 0);
          if (note)
            {
              /* Remove the note and keep looking at the notes for
                 this insn.  */
              remove_note (insn, note);
              continue;
            }
          break;
        }
    }
}

/* Traverse an argument list in VALUES and expand all complex
   arguments into their components.  */
static tree
split_complex_values (tree values)
{
  tree p;

  /* Before allocating memory, check for the common case of no complex.  */
  for (p = values; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      if (type && TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        goto found;
    }
  return values;

 found:
  values = copy_list (values);

  for (p = values; p; p = TREE_CHAIN (p))
    {
      tree complex_value = TREE_VALUE (p);
      tree complex_type;

      complex_type = TREE_TYPE (complex_value);
      if (!complex_type)
        continue;

      if (TREE_CODE (complex_type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (complex_type))
        {
          tree subtype;
          tree real, imag, next;

          subtype = TREE_TYPE (complex_type);
          complex_value = save_expr (complex_value);
          real = build1 (REALPART_EXPR, subtype, complex_value);
          imag = build1 (IMAGPART_EXPR, subtype, complex_value);

          TREE_VALUE (p) = real;
          next = TREE_CHAIN (p);
          imag = build_tree_list (NULL_TREE, imag);
          TREE_CHAIN (p) = imag;
          TREE_CHAIN (imag) = next;

          /* Skip the newly created node.  */
          p = TREE_CHAIN (p);
        }
    }

  return values;
}

/* Traverse a list of TYPES and expand all complex types into their
   components.  */
static tree
split_complex_types (tree types)
{
  tree p;

  /* Before allocating memory, check for the common case of no complex.  */
  for (p = types; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_VALUE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        goto found;
    }
  return types;

 found:
  types = copy_list (types);

  for (p = types; p; p = TREE_CHAIN (p))
    {
      tree complex_type = TREE_VALUE (p);

      if (TREE_CODE (complex_type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (complex_type))
        {
          tree next, imag;

          /* Rewrite complex type with component type.  */
          TREE_VALUE (p) = TREE_TYPE (complex_type);
          next = TREE_CHAIN (p);

          /* Add another component type for the imaginary part.  */
          imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
          TREE_CHAIN (p) = imag;
          TREE_CHAIN (imag) = next;

          /* Skip the newly created node.  */
          p = TREE_CHAIN (p);
        }
    }

  return types;
}
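
/* Illustration (assuming the target splits _Complex float arguments):
   split_complex_values rewrites the argument list
     (a : _Complex float, b : int)
   into
     (REALPART_EXPR <a> : float, IMAGPART_EXPR <a> : float, b : int),
   and split_complex_types rewrites the matching type list
     (complex float, int)  into  (float, float, int).  */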

/* Output a library call to function FUN (a SYMBOL_REF rtx).
   The RETVAL parameter specifies whether the return value needs to be
   saved; the other parameters are documented in the emit_library_call
   function below.  */

static rtx
emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
                           enum libcall_type fn_type,
                           enum machine_mode outmode, int nargs, va_list p)
{
  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  struct args_size original_args_size;
  int argnum;
  rtx fun;
  int inc;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg
  {
    rtx value;
    enum machine_mode mode;
    rtx reg;
    int partial;
    struct locate_and_pad_arg_data locate;
    rtx save_area;
  };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  rtx call_fusage = 0;
  rtx mem_value = 0;
  rtx valreg;
  int pcc_struct_value = 0;
  int struct_value_size = 0;
  int flags;
  int reg_parm_stack_space = 0;
  int needed;
  rtx before_call;
  tree tfom;                    /* type_for_mode (outmode, 0) */

#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save, high_to_save;
  rtx save_area = 0;            /* Place that it is saved.  */
#endif

  /* Size of the stack reserved for parameter registers.  */
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  char *stack_usage_map_buf = NULL;

  rtx struct_value = targetm.calls.struct_value_rtx (0, 0);

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
#endif

  /* By default, library functions cannot throw.  */
  flags = ECF_NOTHROW;

  switch (fn_type)
    {
    case LCT_NORMAL:
      break;
    case LCT_CONST:
      flags |= ECF_CONST;
      break;
    case LCT_PURE:
      flags |= ECF_PURE;
      break;
    case LCT_CONST_MAKE_BLOCK:
      flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
      break;
    case LCT_PURE_MAKE_BLOCK:
      flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
      break;
    case LCT_NORETURN:
      flags |= ECF_NORETURN;
      break;
    case LCT_THROW:
      flags = ECF_NORETURN;
      break;
    case LCT_RETURNS_TWICE:
      flags = ECF_RETURNS_TWICE;
      break;
    }
  fun = orgfun;

  /* Ensure current function's preferred stack boundary is at least
     what we need.  */
  if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* If this kind of value comes back in memory,
     decide where in memory it should come back.  */
  if (outmode != VOIDmode)
    {
      tfom = lang_hooks.types.type_for_mode (outmode, 0);
      if (aggregate_value_p (tfom, 0))
        {
#ifdef PCC_STATIC_STRUCT_RETURN
          rtx pointer_reg
            = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
          mem_value = gen_rtx_MEM (outmode, pointer_reg);
          pcc_struct_value = 1;
          if (value == 0)
            value = gen_reg_rtx (outmode);
#else /* not PCC_STATIC_STRUCT_RETURN */
          struct_value_size = GET_MODE_SIZE (outmode);
          if (value != 0 && MEM_P (value))
            mem_value = value;
          else
            mem_value = assign_temp (tfom, 0, 1, 1);
#endif
          /* This call returns a big structure.  */
          flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
        }
    }
  else
    tfom = void_type_node;

  /* ??? Unfinished: must pass the memory address as an argument.  */

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = alloca ((nargs + 1) * sizeof (struct arg));
  memset (argvec, 0, (nargs + 1) * sizeof (struct arg));

#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
  INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
#else
  INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
#endif

  args_size.constant = 0;
  args_size.var = 0;

  count = 0;

  /* Now we are about to start emitting insns that can be deleted
     if a libcall is deleted.  */
  if (flags & ECF_LIBCALL_BLOCK)
    start_sequence ();

  push_temp_slots ();

  /* If there's a structure value address to be passed,
     either pass it in the special place, or pass it as an extra argument.  */
  if (mem_value && struct_value == 0 && ! pcc_struct_value)
    {
      rtx addr = XEXP (mem_value, 0);

      nargs++;

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (addr) && !MEM_P (addr)
          && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
        addr = force_operand (addr, NULL_RTX);

      argvec[count].value = addr;
      argvec[count].mode = Pmode;
      argvec[count].partial = 0;

      argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
      gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
                                                   NULL_TREE, 1) == 0);

      locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                           1,
#else
                           argvec[count].reg != 0,
#endif
                           0, NULL_TREE, &args_size, &argvec[count].locate);

      if (argvec[count].reg == 0 || argvec[count].partial != 0
          || reg_parm_stack_space > 0)
        args_size.constant += argvec[count].locate.size.constant;

      FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);

      count++;
    }

  for (; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      /* We cannot convert the arg value to the mode the library wants here;
         must do it earlier where we know the signedness of the arg.  */
      gcc_assert (mode != BLKmode
                  && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (val) && !MEM_P (val)
          && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
        val = force_operand (val, NULL_RTX);

      if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
        {
          rtx slot;
          int must_copy
            = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);

          /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
             functions, so we have to pretend this isn't such a function.  */
          if (flags & ECF_LIBCALL_BLOCK)
            {
              rtx insns = get_insns ();
              end_sequence ();
              emit_insn (insns);
            }
          /* If this was a CONST function, it is now PURE since
             it now reads memory.  */
          if (flags & ECF_CONST)
            {
              flags &= ~ECF_CONST;
              flags |= ECF_PURE;
            }
          flags &= ~ECF_LIBCALL_BLOCK;

          if (MEM_P (val) && !must_copy)
            slot = val;
          else
            {
              slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
                                  0, 1, 1);
              emit_move_insn (slot, val);
            }

          call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                           gen_rtx_USE (VOIDmode, slot),
                                           call_fusage);
          if (must_copy)
            call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                             gen_rtx_CLOBBER (VOIDmode,
                                                              slot),
                                             call_fusage);

          mode = Pmode;
          val = force_operand (XEXP (slot, 0), NULL_RTX);
        }

      argvec[count].value = val;
      argvec[count].mode = mode;

      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);

      argvec[count].partial
        = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);

      locate_and_pad_parm (mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                           1,
#else
                           argvec[count].reg != 0,
#endif
                           argvec[count].partial,
                           NULL_TREE, &args_size, &argvec[count].locate);

      gcc_assert (!argvec[count].locate.size.var);

      if (argvec[count].reg == 0 || argvec[count].partial != 0
          || reg_parm_stack_space > 0)
        args_size.constant += argvec[count].locate.size.constant;

      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
    }

  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

  original_args_size = args_size;
  args_size.constant = (((args_size.constant
                          + stack_pointer_delta
                          + STACK_BYTES - 1)
                          / STACK_BYTES
                          * STACK_BYTES)
                         - stack_pointer_delta);
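
  /* Worked example: with STACK_BYTES == 16, stack_pointer_delta == 4 and
     20 bytes of arguments, the expression above yields
     ((20 + 4 + 15) / 16) * 16 - 4 = 28, so after pushing the arguments
     the stack pointer is once again 16-byte aligned.  */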

  args_size.constant = MAX (args_size.constant,
                            reg_parm_stack_space);

#ifndef OUTGOING_REG_PARM_STACK_SPACE
  args_size.constant -= reg_parm_stack_space;
#endif

  if (args_size.constant > current_function_outgoing_args_size)
    current_function_outgoing_args_size = args_size.constant;

  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* Since the stack pointer will never be pushed, it is possible for
         the evaluation of a parm to clobber something we have already
         written to the stack.  Since most function calls on RISC machines
         do not use the stack, this is uncommon, but must work correctly.

         Therefore, we save any area of the stack that was already written
         and that we are using.  Here we set up to do this by making a new
         stack usage map from the old one.

         Another approach might be to try to reorder the argument
         evaluations to avoid this conflicting stack usage.  */
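
      /* (For reference: stack_usage_map is a byte map of the outgoing
         argument area; stack_usage_map[i] is nonzero once byte I has been
         written, which is what the save/restore logic below keys off.)  */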

      needed = args_size.constant;

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      /* Since we will be writing into the entire argument area, the
         map must be allocated for its entire size, not just the part that
         is the responsibility of the caller.  */
      needed += reg_parm_stack_space;
#endif

#ifdef ARGS_GROW_DOWNWARD
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                         needed + 1);
#else
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                         needed);
#endif
      stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
      stack_usage_map = stack_usage_map_buf;

      if (initial_highest_arg_in_use)
        memcpy (stack_usage_map, initial_stack_usage_map,
                initial_highest_arg_in_use);

      if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
        memset (&stack_usage_map[initial_highest_arg_in_use], 0,
               highest_outgoing_arg_in_use - initial_highest_arg_in_use);
      needed = 0;

      /* We must be careful to use virtual regs before they're instantiated,
         and real regs afterwards.  Loop optimization, for example, can create
         new libcalls after we've instantiated the virtual regs, and if we
         use virtuals anyway, they won't match the rtl patterns.  */

      if (virtuals_instantiated)
        argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
      else
        argblock = virtual_outgoing_args_rtx;
    }
  else
    {
      if (!PUSH_ARGS)
        argblock = push_block (GEN_INT (args_size.constant), 0, 0);
    }

  /* If we push args individually in reverse order, perform stack alignment
     before the first push (the last arg).  */
  if (argblock == 0 && PUSH_ARGS_REVERSED)
    anti_adjust_stack (GEN_INT (args_size.constant
                                - original_args_size.constant));

  if (PUSH_ARGS_REVERSED)
    {
      inc = -1;
      argnum = nargs - 1;
    }
  else
    {
      inc = 1;
      argnum = 0;
    }

#ifdef REG_PARM_STACK_SPACE
  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* The argument list is the property of the called routine and it
         may clobber it.  If the fixed area has been used for previous
         parameters, we must save and restore it.  */
      save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
                                            &low_to_save, &high_to_save);
    }
#endif

  /* Push the args that need to be pushed.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      enum machine_mode mode = argvec[argnum].mode;
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;
      int lower_bound = 0, upper_bound = 0, i;

      if (! (reg != 0 && partial == 0))
        {
          if (ACCUMULATE_OUTGOING_ARGS)
            {
              /* If this is being stored into a pre-allocated, fixed-size,
                 stack area, save any previous data at that location.  */

#ifdef ARGS_GROW_DOWNWARD
              /* stack_slot is negative, but we want to index stack_usage_map
                 with positive values.  */
              upper_bound = -argvec[argnum].locate.offset.constant + 1;
              lower_bound = upper_bound - argvec[argnum].locate.size.constant;
#else
              lower_bound = argvec[argnum].locate.offset.constant;
              upper_bound = lower_bound + argvec[argnum].locate.size.constant;
#endif
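
              /* E.g. a downward-growing slot at offset -16 of size 8 maps
                 to UPPER_BOUND = 17 and LOWER_BOUND = 9, while an
                 upward-growing slot at offset 16 of size 8 maps to
                 LOWER_BOUND = 16 and UPPER_BOUND = 24.  */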

              i = lower_bound;
              /* Don't worry about things in the fixed argument area;
                 it has already been saved.  */
              if (i < reg_parm_stack_space)
                i = reg_parm_stack_space;
              while (i < upper_bound && stack_usage_map[i] == 0)
                i++;

              if (i < upper_bound)
                {
                  /* We need to make a save area.  */
                  unsigned int size
                    = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
                  enum machine_mode save_mode
                    = mode_for_size (size, MODE_INT, 1);
                  rtx adr
                    = plus_constant (argblock,
                                     argvec[argnum].locate.offset.constant);
                  rtx stack_area
                    = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));

                  if (save_mode == BLKmode)
                    {
                      argvec[argnum].save_area
                        = assign_stack_temp (BLKmode,
                                             argvec[argnum].locate.size.constant,
                                             0);

                      emit_block_move (validize_mem (argvec[argnum].save_area),
                                       stack_area,
                                       GEN_INT (argvec[argnum].locate.size.constant),
                                       BLOCK_OP_CALL_PARM);
                    }
                  else
                    {
                      argvec[argnum].save_area = gen_reg_rtx (save_mode);

                      emit_move_insn (argvec[argnum].save_area, stack_area);
                    }
                }
            }

          emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
                          partial, reg, 0, argblock,
                          GEN_INT (argvec[argnum].locate.offset.constant),
                          reg_parm_stack_space,
                          ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));

          /* Now mark the segment we just used.  */
          if (ACCUMULATE_OUTGOING_ARGS)
            for (i = lower_bound; i < upper_bound; i++)
              stack_usage_map[i] = 1;

          NO_DEFER_POP;

          if (flags & ECF_CONST)
            {
              rtx use;

              /* Indicate argument access so that alias.c knows that these
                 values are live.  */
              if (argblock)
                use = plus_constant (argblock,
                                     argvec[argnum].locate.offset.constant);
              else
                /* When arguments are pushed, trying to tell alias.c where
                   exactly this argument is won't work, because the
                   auto-increment causes confusion.  So we merely indicate
                   that we access something with a known mode somewhere on
                   the stack.  */
                use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                                    gen_rtx_SCRATCH (Pmode));
              use = gen_rtx_MEM (argvec[argnum].mode, use);
              use = gen_rtx_USE (VOIDmode, use);
              call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
            }
        }
    }

  /* If we pushed args in forward order, perform stack alignment
     after pushing the last arg.  */
  if (argblock == 0 && !PUSH_ARGS_REVERSED)
    anti_adjust_stack (GEN_INT (args_size.constant
                                - original_args_size.constant));

  if (PUSH_ARGS_REVERSED)
    argnum = nargs - 1;
  else
    argnum = 0;

  fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);

  /* Now load any reg parms into their regs.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      enum machine_mode mode = argvec[argnum].mode;
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      /* Handle calls that pass values in multiple non-contiguous
         locations.  The PA64 has examples of this for library calls.  */
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
      else if (reg != 0 && partial == 0)
        emit_move_insn (reg, val);

      NO_DEFER_POP;
    }

  /* Any regs containing parms remain in use through the call.  */
  for (count = 0; count < nargs; count++)
    {
      rtx reg = argvec[count].reg;
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        use_group_regs (&call_fusage, reg);
      else if (reg != 0)
        use_reg (&call_fusage, reg);
    }

  /* Pass the function the address in which to return a structure value.  */
  if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
    {
      emit_move_insn (struct_value,
                      force_reg (Pmode,
                                 force_operand (XEXP (mem_value, 0),
                                                NULL_RTX)));
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
    }

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;
  valreg = (mem_value == 0 && outmode != VOIDmode
            ? hard_libcall_value (outmode) : NULL_RTX);

  /* Stack must be properly aligned now.  */
  gcc_assert (!(stack_pointer_delta
                & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));

  before_call = get_last_insn ();

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */
  /* The return type is needed to decide how many bytes the function pops.
     Signedness plays no role in that, so for simplicity, we pretend it's
     always signed.  We also assume that the list of arguments passed has
     no impact, so we pretend it is unknown.  */

  emit_call_1 (fun, NULL,
               get_identifier (XSTR (orgfun, 0)),
               build_function_type (tfom, NULL_TREE),
               original_args_size.constant, args_size.constant,
               struct_value_size,
               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
               valreg,
               old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);

  /* For calls to `setjmp', etc., inform flow.c it should complain
     if nonvolatile values are live.  For functions that cannot return,
     inform flow that control does not fall through.  */

  if (flags & ECF_NORETURN)
    {
      /* The barrier must be emitted
         immediately after the CALL_INSN.  Some ports emit more than
         just a CALL_INSN above, so we must search for it here.  */

      rtx last = get_last_insn ();
      while (!CALL_P (last))
        {
          last = PREV_INSN (last);
          /* There was no CALL_INSN?  */
          gcc_assert (last != before_call);
        }

      emit_barrier_after (last);
    }

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;

  /* If call is cse'able, make appropriate pair of reg-notes around it.
     Test valreg so we don't crash; may safely ignore `const'
     if return type is void.  Disable for PARALLEL return values, because
     we have no way to move such values into a pseudo register.  */
  if (flags & ECF_LIBCALL_BLOCK)
    {
      rtx insns;

      if (valreg == 0)
        {
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
        }
      else
        {
          rtx note = 0;
          rtx temp;
          int i;

          if (GET_CODE (valreg) == PARALLEL)
            {
              temp = gen_reg_rtx (outmode);
              emit_group_store (temp, valreg, NULL_TREE,
                                GET_MODE_SIZE (outmode));
              valreg = temp;
            }

          temp = gen_reg_rtx (GET_MODE (valreg));

          /* Construct an "equal form" for the value which mentions all the
             arguments in order as well as the function name.  */
          for (i = 0; i < nargs; i++)
            note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
          note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);

          insns = get_insns ();
          end_sequence ();

          if (flags & ECF_PURE)
            note = gen_rtx_EXPR_LIST (VOIDmode,
                        gen_rtx_USE (VOIDmode,
                                     gen_rtx_MEM (BLKmode,
                                                  gen_rtx_SCRATCH (VOIDmode))),
                        note);

          emit_libcall_block (insns, temp, valreg, note);

          valreg = temp;
        }
    }
  pop_temp_slots ();

  /* Copy the value to the right place.  */
  if (outmode != VOIDmode && retval)
    {
      if (mem_value)
        {
          if (value == 0)
            value = mem_value;
          if (value != mem_value)
            emit_move_insn (value, mem_value);
        }
      else if (GET_CODE (valreg) == PARALLEL)
        {
          if (value == 0)
            value = gen_reg_rtx (outmode);
          emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
        }
      else if (value != 0)
        emit_move_insn (value, valreg);
      else
        value = valreg;
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
        restore_fixed_argument_area (save_area, argblock,
                                     high_to_save, low_to_save);
#endif

      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
        if (argvec[count].save_area)
          {
            enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
            rtx adr = plus_constant (argblock,
                                     argvec[count].locate.offset.constant);
            rtx stack_area = gen_rtx_MEM (save_mode,
                                          memory_address (save_mode, adr));

            if (save_mode == BLKmode)
              emit_block_move (stack_area,
                               validize_mem (argvec[count].save_area),
                               GEN_INT (argvec[count].locate.size.constant),
                               BLOCK_OP_CALL_PARM);
            else
              emit_move_insn (stack_area, argvec[count].save_area);
          }

      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  if (stack_usage_map_buf)
    free (stack_usage_map_buf);

  return value;

}

/* Output a library call to function FUN (a SYMBOL_REF rtx)
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.

   FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
   calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
   which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
   LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
   REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
   or other LCT_ value for other types of library calls.  */

void
emit_library_call (rtx orgfun, enum libcall_type fn_type,
                   enum machine_mode outmode, int nargs, ...)
{
  va_list p;

  va_start (p, nargs);
  emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
  va_end (p);
}
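
/* Illustrative use, not part of this file: a caller with two SImode
   operands OP0 and OP1 and a hypothetical support routine `__foo2'
   might write

     emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__foo2"),
                        LCT_NORMAL, VOIDmode, 2,
                        op0, SImode, op1, SImode);

   to emit a call with no usable return value.  */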

/* Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  */

rtx
emit_library_call_value (rtx orgfun, rtx value,
                         enum libcall_type fn_type,
                         enum machine_mode outmode, int nargs, ...)
{
  rtx result;
  va_list p;

  va_start (p, nargs);
  result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
                                      nargs, p);
  va_end (p);

  return result;
}
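
/* Illustrative use, not part of this file: to emit a call to a
   hypothetical SImode multiply routine and obtain its result,

     rtx res = emit_library_call_value (gen_rtx_SYMBOL_REF (Pmode,
                                                            "__mulsi3"),
                                        NULL_RTX, LCT_CONST, SImode, 2,
                                        op0, SImode, op1, SImode);

   RES then holds the returned value (a new pseudo when NULL_RTX is
   passed for VALUE).  */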

/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   MAY_BE_ALLOCA nonzero says this could be a call to `alloca',
   so we must be careful about how the stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4029
   argument stack.  This is used if ACCUMULATE_OUTGOING_ARGS to indicate
4030
   that we need not worry about saving and restoring the stack.
4031
 
4032
   FNDECL is the declaration of the function we are calling.
4033
 
4034
   Return nonzero if this arg should cause sibcall failure,
4035
   zero otherwise.  */
4036
 
4037
static int
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
               int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack area,
         save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
        {
#ifdef ARGS_GROW_DOWNWARD
          /* stack_slot is negative, but we want to index stack_usage_map
             with positive values.  */
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
          else
            upper_bound = 0;

          lower_bound = upper_bound - arg->locate.size.constant;
#else
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
          else
            lower_bound = 0;

          upper_bound = lower_bound + arg->locate.size.constant;
#endif
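          /* Concretely (hypothetical numbers, ARGS_GROW_DOWNWARD case):
             a slot addressed at (plus arg_pointer (const_int -16)) gives
             upper_bound == 17, and with locate.size.constant == 8 the
             loop below scans stack_usage_map[9] through [16].  */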

          i = lower_bound;
          /* Don't worry about things in the fixed argument area;
             it has already been saved.  */
          if (i < reg_parm_stack_space)
            i = reg_parm_stack_space;
          while (i < upper_bound && stack_usage_map[i] == 0)
            i++;

          if (i < upper_bound)
            {
              /* We need to make a save area.  */
              unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
              enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
              rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
              rtx stack_area = gen_rtx_MEM (save_mode, adr);

              if (save_mode == BLKmode)
                {
                  tree ot = TREE_TYPE (arg->tree_value);
                  tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
                                                       | TYPE_QUAL_CONST));

                  arg->save_area = assign_temp (nt, 0, 1, 1);
                  preserve_temp_slots (arg->save_area);
                  emit_block_move (validize_mem (arg->save_area), stack_area,
                                   GEN_INT (arg->locate.size.constant),
                                   BLOCK_OP_CALL_PARM);
                }
              else
                {
                  arg->save_area = gen_reg_rtx (save_mode);
                  emit_move_insn (arg->save_area, stack_area);
                }
            }
        }
    }

  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
        reg = arg->tail_call_reg;
      else
        reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
         being evaluated directly into the outgoing argument list and
         expand_call must take special action to preserve the argument list
         if it is called recursively.

         For scalar function arguments stack_usage_map is sufficient to
         determine which stack slots must be saved and restored.  Scalar
         arguments in general have pass_on_stack == 0.

         If this argument is initialized by a function which takes the
         address of the argument (a C++ constructor or a C function
         returning a BLKmode structure), then stack_usage_map is
         insufficient and expand_call must push the stack around the
         function call.  Such arguments have pass_on_stack == 1.

         Note that it is always safe to set stack_arg_under_construction,
         but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
        stack_arg_under_construction++;

      arg->value = expand_expr (pval,
                                (partial
                                 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
                                ? NULL_RTX : arg->stack,
                                VOIDmode, EXPAND_STACK_PARM);

      /* If we are promoting the object (or if for any other reason the
         mode doesn't agree), convert the mode.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
        arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
                                    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
        stack_arg_under_construction--;
    }

  /* Check for overlap with already clobbered argument area.  */
  if ((flags & ECF_SIBCALL)
      && MEM_P (arg->value)
      && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
                                               arg->locate.size.constant))
    sibcall_failure = 1;

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;

      /* Argument is a scalar, not entirely passed in registers.
         (If part is passed in registers, arg->partial says how much
         and emit_push_insn will take care of putting it there.)

         Push it, and if its size is less than the
         amount of space allocated to it,
         also bump stack pointer by the additional space.
         Note that in C the default argument promotions
         will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
         On many machines, pushing a byte will advance the stack
         pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
         round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
        used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
                 / (PARM_BOUNDARY / BITS_PER_UNIT))
                * (PARM_BOUNDARY / BITS_PER_UNIT));
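      /* E.g. (hypothetical numbers): pushing a 1-byte argument with
         8-bit units and PARM_BOUNDARY == 32 gives
         used == ((1 + 4 - 1) / 4) * 4 == 4, so the stack pointer is
         bumped by the additional used - size == 3 bytes below.  */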

      /* This isn't already where we want it on the stack, so put it there.
         This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
                      PARM_BOUNDARY, partial, reg, used - size, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.  */
      if (partial == 0)
        arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
         If part is passed in registers, PARTIAL says how much
         and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
         of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
        {
          excess = 0;
          size_rtx = ARGS_SIZE_RTX (arg->locate.size);
        }
      else
        {
          /* PUSH_ROUNDING has no effect on us, because emit_push_insn
             for BLKmode is careful to avoid it.  */
          excess = (arg->locate.size.constant
                    - int_size_in_bytes (TREE_TYPE (pval))
                    + partial);
          size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
                                  NULL_RTX, TYPE_MODE (sizetype), 0);
        }

      parm_align = arg->locate.boundary;

      /* When an argument is padded down, the block is aligned to
         PARM_BOUNDARY, but the actual argument isn't.  */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
        {
          if (arg->locate.size.var)
            parm_align = BITS_PER_UNIT;
          else if (excess)
            {
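              /* (excess & -excess) isolates the least significant set
                 bit of EXCESS, i.e. the largest power of two dividing
                 it; e.g. (hypothetically) excess == 12 gives an
                 excess_align of 4 * BITS_PER_UNIT == 32 with 8-bit
                 units, capping PARM_ALIGN at 32 bits.  */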
              unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, excess_align);
            }
        }

      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
        {
          /* emit_push_insn might not work properly if arg->value and
             argblock + arg->locate.offset areas overlap.  */
          rtx x = arg->value;
          int i = 0;

          if (XEXP (x, 0) == current_function_internal_arg_pointer
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) ==
                     current_function_internal_arg_pointer
                  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
            {
              if (XEXP (x, 0) != current_function_internal_arg_pointer)
                i = INTVAL (XEXP (XEXP (x, 0), 1));

              /* expand_call should ensure this.  */
              gcc_assert (!arg->locate.offset.var
                          && GET_CODE (size_rtx) == CONST_INT);

              if (arg->locate.offset.constant > i)
                {
                  if (arg->locate.offset.constant < i + INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
              else if (arg->locate.offset.constant < i)
                {
                  if (i < arg->locate.offset.constant + INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
            }
        }
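      /* A concrete (hypothetical) overlap caught by the check above:
         with the source at i == 4 from the incoming argument pointer
         and INTVAL (size_rtx) == 8, a destination slot at
         arg->locate.offset.constant == 8 covers bytes [8, 16) while
         the source covers [4, 12), so sibcall_failure is set.  */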

      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
                      parm_align, partial, reg, excess, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.

         ??? Unlike the case above, in which we want the actual
         address of the data, so that we can load it directly into a
         register, here we want the address of the stack slot, so that
         it's properly aligned for word-by-word copying or something
         like that.  It's not clear that this is always correct.  */
      if (partial == 0)
        arg->value = arg->stack_slot;
    }

  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
        = emit_group_load_into_temps (arg->reg, arg->value, type,
                                      int_size_in_bytes (type));
    }

  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();

  return sibcall_failure;
}

/* Nonzero if we do not know how to pass TYPE solely in registers.  */

bool
must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
                             tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  return false;
}

/* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   takes trailing padding of a structure into account.  */
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */

bool
must_pass_in_stack_var_size_or_pad (enum machine_mode mode, tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  /* If the padding and mode of the type is such that a copy into
     a register would put it into the wrong part of the register.  */
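  /* E.g. (hypothetical numbers): with 8-bit units, PARM_BOUNDARY == 32
     and a big-endian target, a 6-byte BLKmode struct (6 % 4 != 0) whose
     FUNCTION_ARG_PADDING is upward satisfies this test and so must be
     passed on the stack.  */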
  if (mode == BLKmode
      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (FUNCTION_ARG_PADDING (mode, type)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;

  return false;
}
