/* scarts/trunk/toolchain/scarts-gcc/gcc-4.1.1/gcc/calls.c (SVN rev 20) */
/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "function.h"
#include "regs.h"
#include "toplev.h"
#include "output.h"
#include "tm_p.h"
#include "timevar.h"
#include "sbitmap.h"
#include "langhooks.h"
#include "target.h"
#include "cgraph.h"
#include "except.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
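
/* A worked example (illustrative values, not from this port): with
   PREFERRED_STACK_BOUNDARY == 64 and BITS_PER_UNIT == 8, STACK_BYTES
   evaluates to 64 / 8 = 8, so argument block sizes computed below are
   rounded up to multiples of 8 bytes.  */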

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
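
/* Illustrative sketch only (not taken from any particular target): for
   an argument split between a register and the stack, FUNCTION_ARG may
   yield REG as a PARALLEL shaped roughly like

     (parallel [(expr_list (nil) (const_int 0))
                (expr_list (reg:SI 8) (const_int 4))])

   where the leading (nil) entry records that part of the argument also
   lives on the stack; initialize_argument_information below tests for
   exactly that shape when it sets pass_on_stack.  */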

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots after they have already been overwritten with tail
   call arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                         HOST_WIDE_INT, rtx, rtx, int, rtx, int,
                         CUMULATIVE_ARGS *);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, int);
static void initialize_argument_information (int, struct arg_data *,
                                             struct args_size *, int, tree,
                                             tree, CUMULATIVE_ARGS *, int,
                                             rtx *, int *, int *, int *,
                                             bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
                                      enum machine_mode, int, va_list);
static int special_function_p (tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
                                                      unsigned int);
static tree split_complex_values (tree);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
              ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
              : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
        funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      static_chain_value = convert_memory_address (Pmode, static_chain_value);
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (REG_P (static_chain_rtx))
        use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}
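
/* Behavior sketch (our reading, not asserted by the original source):
   a direct call keeps its SYMBOL_REF so that, unless -fno-function-cse
   is given, the address may be pulled into a register and CSEd across
   calls, while an indirect call such as (*fp) (x) arrives as a
   non-SYMBOL_REF address and is validized -- and, when register
   parameters are live on a SMALL_REGISTER_CLASSES target, forced out
   of memory -- before the call insn is built.  */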

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
             HOST_WIDE_INT rounded_stack_size,
             HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx;
  struct_value_size_rtx = GEN_INT (struct_value_size);
#endif

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (* args_so_far);
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_SIBCALL_VALUE_POP (valreg,
                                     gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     n_pop);
      else
        pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                               rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_CALL_VALUE_POP (valreg,
                                  gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
        pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                            rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
        emit_call_insn (GEN_SIBCALL_VALUE (valreg,
                                           gen_rtx_MEM (FUNCTION_MODE, funexp),
                                           rounded_stack_size_rtx,
                                           next_arg_reg, NULL_RTX));
      else
        emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     struct_value_size_rtx));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
        emit_call_insn (GEN_CALL_VALUE (valreg,
                                        gen_rtx_MEM (FUNCTION_MODE, funexp),
                                        rounded_stack_size_rtx, next_arg_reg,
                                        NULL_RTX));
      else
        emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg,
                                  struct_value_size_rtx));
    }
  else
#endif
    gcc_unreachable ();

  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Mark memory as used for "pure" function call.  */
  if (ecf_flags & ECF_PURE)
    call_fusage
      = gen_rtx_EXPR_LIST
        (VOIDmode,
         gen_rtx_USE (VOIDmode,
                      gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
         call_fusage);

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & (ECF_CONST | ECF_PURE))
    CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (ecf_flags & ECF_NOTHROW)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
                                               REG_NOTES (call_insn));
  else
    {
      int rn = lookup_stmt_eh_region (fntree);

      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
         throw, which we already took care of.  */
      if (rn > 0)
        REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
                                                   REG_NOTES (call_insn));
      note_current_region_may_contain_throw ();
    }

  if (ecf_flags & ECF_NORETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
                                               REG_NOTES (call_insn));

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
                                                 REG_NOTES (call_insn));
      current_function_calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;
    }

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (rounded_stack_size != 0)
        {
          if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN))
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
              && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
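
/* Reading aid (our summary): the preprocessor cascade above tries the
   target's call patterns in a fixed order of preference --
   sibcall_pop/sibcall_value_pop, then call_pop/call_value_pop when the
   callee pops its own arguments, then sibcall/sibcall_value, then
   plain call/call_value -- and falls through to gcc_unreachable ()
   only if the target defines none of them.  */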

/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set NORETURN if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (tree fndecl, int flags)
{
  if (fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
         wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
          || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
            && name[0] == 'a'
            && ! strcmp (name, "alloca"))
           || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
               && name[0] == '_'
               && ! strcmp (name, "__builtin_alloca"))))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
        {
          if (name[1] == '_' && name[2] == 'x')
            tname += 3;
          else if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }

      if (tname[0] == 's')
        {
          if ((tname[1] == 'e'
               && (! strcmp (tname, "setjmp")
                   || ! strcmp (tname, "setjmp_syscall")))
              || (tname[1] == 'i'
                  && ! strcmp (tname, "sigsetjmp"))
              || (tname[1] == 'a'
                  && ! strcmp (tname, "savectx")))
            flags |= ECF_RETURNS_TWICE;

          if (tname[1] == 'i'
              && ! strcmp (tname, "siglongjmp"))
            flags |= ECF_NORETURN;
        }
      else if ((tname[0] == 'q' && tname[1] == 's'
                && ! strcmp (tname, "qsetjmp"))
               || (tname[0] == 'v' && tname[1] == 'f'
                   && ! strcmp (tname, "vfork"))
               || (tname[0] == 'g' && tname[1] == 'e'
                   && !strcmp (tname, "getcontext")))
        flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
               && ! strcmp (tname, "longjmp"))
        flags |= ECF_NORETURN;
    }

  return flags;
}
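
/* Worked example (ours): for a file-scope declaration such as
   `extern int __sigsetjmp (jmp_buf, int);' the `__' prefix is
   stripped, `sigsetjmp' matches the table above, and the call gets
   ECF_RETURNS_TWICE; the same name declared inside a function body is
   deliberately not matched, since its DECL_CONTEXT is not the
   translation unit.  */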

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (tree fndecl)
{
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}

/* Return true when EXP contains an alloca call.  */
bool
alloca_call_p (tree exp)
{
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
          == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                              0) & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (tree exp)
{
  int flags = 0;
  tree type = exp;

  if (DECL_P (exp))
    {
      type = TREE_TYPE (exp);

      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* The function exp may have the `pure' attribute.  */
      if (DECL_IS_PURE (exp))
        flags |= ECF_PURE;

      if (DECL_IS_NOVOPS (exp))
        flags |= ECF_NOVOPS;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
        flags |= ECF_CONST;

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
    flags |= ECF_CONST;

  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

  /* Mark if the function returns with the stack pointer depressed.  We
     cannot consider it pure or constant in that case.  */
  if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
    {
      flags |= ECF_SP_DEPRESSED;
      flags &= ~(ECF_PURE | ECF_CONST);
    }

  return flags;
}
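
/* Example (ours, using standard GNU C attributes): a declaration like

     extern void *grab_buffer (unsigned long) __attribute__ ((malloc, nothrow));

   comes back from flags_from_decl_or_type with ECF_MALLOC and
   ECF_NOTHROW set; a `noreturn' function is recognized through
   TREE_THIS_VOLATILE and additionally yields ECF_NORETURN.  */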

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (TREE_OPERAND (t, 0));
      if (t && TREE_CODE (t) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
        flags = 0;
    }

  return flags;
}

/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
                                         VOIDmode, 0);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();
          }

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !LEGITIMATE_CONSTANT_P (args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)
          {
            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));
          }

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameter registers.  */

        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
                 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
                     || optimize))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

  /* The argument list is the property of the called routine, which
     may clobber it.  If the fixed area has been used for previous
     parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
        int num_to_save;
        enum machine_mode save_mode;
        int delta;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;
        save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        if ((low & (MIN (GET_MODE_SIZE (save_mode),
                         BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
          save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
        delta = -high;
#else
        delta = low;
#endif
        stack_area = gen_rtx_MEM (save_mode,
                                  memory_address (save_mode,
                                                  plus_constant (argblock,
                                                                 delta)));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save, 0);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  enum machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx stack_area;

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;
#else
  delta = low_to_save;
#endif
  stack_area = gen_rtx_MEM (save_mode,
                            memory_address (save_mode,
                                            plus_constant (argblock, delta)));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */
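
/* Worked example (hypothetical numbers): with args growing upward,
   reg_parm_stack_space == 16 and bytes 8..15 of stack_usage_map in
   use, the loop finds low == 8 and high == 15, so num_to_save == 8;
   if the target has an 8-byte integer mode and low is aligned to it,
   the area is saved with one register move, otherwise save_mode
   falls back to BLKmode and an 8-byte block move is emitted.  */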

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && args[i].mode == BLKmode
        && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

        if (args[i].partial)
          {
            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
          }
        else
          {
            args[i].n_aligned_regs
              = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
          }

        args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == downward)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, word_mode,
                             word);
          }
      }
}
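
/* Worked example (hypothetical 32-bit big-endian target,
   UNITS_PER_WORD == 4): a 2-byte BLKmode argument has bytes == 2 < 4,
   so n_aligned_regs == 1 and endian_correction == 32 - 2 * 8 == 16;
   the 16 useful bits are then placed at bit offset 16 of the zeroed
   word-mode pseudo, skipping the empty high-order bytes as the
   comment above describes.  */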

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   ACTPARMS.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   FNDECL is the tree code for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */
static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree actparms, tree fndecl,
                                 CUMULATIVE_ARGS *args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level, int *old_pending_adj,
                                 int *must_preallocate, int *ecf_flags,
                                 bool *may_tailcall, bool call_from_thunk_p)
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  int i;
  tree p;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;
      /* In this case, must reverse order of args
         so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
         pass the first field of the union.  We have already verified that
         the modes are the same.  */
      if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
        type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many bytes are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (pass_by_reference (args_so_far, TYPE_MODE (type),
                             type, argpos < n_named_args))
        {
          bool callee_copies;
          tree base;

          callee_copies
            = reference_callee_copied (args_so_far, TYPE_MODE (type),
                                       type, argpos < n_named_args);

          /* If we're compiling a thunk, pass through invisible references
             instead of making a copy.  */
          if (call_from_thunk_p
              || (callee_copies
                  && !TREE_ADDRESSABLE (type)
                  && (base = get_base_address (args[i].tree_value))
                  && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
            {
              /* We can't use sibcalls if a callee-copied argument is
                 stored in the current function's frame.  */
              if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
                *may_tailcall = false;

              args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
              type = TREE_TYPE (args[i].tree_value);

              *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
            }
          else
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
                  || (flag_stack_check && ! STACK_CHECK_BUILTIN
                      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
                                                STACK_CHECK_MAX_VAR_SIZE))))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (TREE_VALUE (p));

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  copy = gen_rtx_MEM (BLKmode,
                                      allocate_dynamic_stack_space
                                      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
                  set_mem_attributes (copy, type, 1);
                }
              else
                copy = assign_temp (type, 0, 1, 0);

              store_expr (args[i].tree_value, copy, 0);

              if (callee_copies)
                *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
              else
                *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

              args[i].tree_value
                = build_fold_addr_expr (make_tree (type, copy));
              type = TREE_TYPE (args[i].tree_value);
              *may_tailcall = false;
            }
        }

      mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);

      if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
        mode = promote_mode (type, mode, &unsignedp, 1);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
                                  argpos < n_named_args);
#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  */
      args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
                                                     argpos < n_named_args);
#else
      args[i].tail_call_reg = args[i].reg;
#endif

      if (args[i].reg)
        args[i].partial
          = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
                                             argpos < n_named_args);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
         we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
        *ecf_flags &= ~ECF_LIBCALL_BLOCK;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
                            argpos < n_named_args);
    }
}
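
/* Illustrative walk-through (ours): for a call `f (big_struct)' on a
   target whose pass_by_reference hook returns true for large
   aggregates, the loop above copies big_struct into a temporary,
   rewrites the argument to the address expression &temp, clears
   *may_tailcall (the callee would read from a frame we are about to
   pop), and only then asks FUNCTION_ARG where that pointer goes.  */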

/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        {
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
          args_size->var = round_up (args_size->var, preferred_stack_boundary);
        }

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          args_size->var
            = size_binop (MINUS_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));
#endif
        }
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
                               + stack_pointer_delta
                               + preferred_stack_boundary - 1)
                              / preferred_stack_boundary
                              * preferred_stack_boundary)
                             - stack_pointer_delta);

      args_size->constant = MAX (args_size->constant,
                                 reg_parm_stack_space);

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size->constant -= reg_parm_stack_space;
#endif
    }
  return unadjusted_args_size;
}
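
/* Arithmetic check of the constant-size path (made-up numbers): with
   args_size->constant == 20, stack_pointer_delta == 4 and a 16-byte
   preferred boundary, ((20 + 4 + 15) / 16) * 16 - 4 == 28, so the
   block plus the outstanding stack-pointer delta together stay
   16-byte aligned; reg_parm_stack_space == 0 leaves that result
   untouched.  */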

/* Precompute parameters as needed for a function call.

   FLAGS is a mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int flags, int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */
  if ((flags & ECF_LIBCALL_BLOCK) == 0)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      enum machine_mode mode;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));

      args[i].initial_value = args[i].value
        = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

      mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
      if (mode != args[i].mode)
        {
          args[i].value
            = convert_modes (args[i].mode, mode,
                             args[i].value, args[i].unsignedp);
#if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
          /* CSE will replace this only if it contains args[i].value
             pseudo, so convert it down to the declared mode using
             a SUBREG.  */
          if (REG_P (args[i].value)
              && GET_MODE_CLASS (args[i].mode) == MODE_INT)
            {
              args[i].initial_value
                = gen_lowpart_SUBREG (mode, args[i].value);
              SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
                                            args[i].unsignedp);
            }
#endif
        }
    }
}

/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
        {
          if (args[i].partial > 0 && ! args[i].pass_on_stack)
            partial_seen = 1;
          else if (partial_seen && args[i].reg == 0)
            must_preallocate = 1;

          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  || TREE_CODE (args[i].tree_value) == COND_EXPR
                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
            copy_to_evaluate_size
              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        }

      if (copy_to_evaluate_size * 2 >= args_size->constant
          && args_size->constant > 0)
        must_preallocate = 1;
    }
  return must_preallocate;
}

/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
        arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
        {
          rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
          rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
          rtx addr;
          unsigned int align, boundary;

          /* Skip this parm if it will not be passed on the stack.  */
          if (! args[i].pass_on_stack && args[i].reg != 0)
            continue;

          if (GET_CODE (offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

          addr = plus_constant (addr, arg_offset);
          args[i].stack = gen_rtx_MEM (args[i].mode, addr);
          set_mem_attributes (args[i].stack,
                              TREE_TYPE (args[i].tree_value), 1);
          align = BITS_PER_UNIT;
          boundary = args[i].locate.boundary;
          if (args[i].locate.where_pad != downward)
            align = boundary;
          else if (GET_CODE (offset) == CONST_INT)
            {
              align = INTVAL (offset) * BITS_PER_UNIT | boundary;
              align = align & -align;
            }
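          /* The two statements above extract the largest power of two
             that divides both the constant offset (in bits) and the
             boundary, i.e. the alignment the slot is guaranteed to have.
             Hypothetical example: a 4-byte offset and a 64-bit boundary
             give 32 | 64 == 96, and 96 & -96 == 32, i.e. 32-bit
             alignment.  */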
          set_mem_align (args[i].stack, align);

          if (GET_CODE (slot_offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (slot_offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

          addr = plus_constant (addr, arg_offset);
          args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
          set_mem_attributes (args[i].stack_slot,
                              TREE_TYPE (args[i].tree_value), 1);
          set_mem_align (args[i].stack_slot, args[i].locate.boundary);

          /* Function incoming arguments may overlap with sibling call
             outgoing arguments and we cannot allow reordering of reads
             from function arguments with stores to outgoing arguments
             of sibling calls.  */
          set_mem_alias_set (args[i].stack, 0);
          set_mem_alias_set (args[i].stack_slot, 0);
        }
    }
}

/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
         make an external definition for it.  */
      if (! TREE_USED (fndecl))
        {
          assemble_external (fndecl);
          TREE_USED (fndecl) = 1;
        }

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
      pop_temp_slots ();        /* FUNEXP can't be BLKmode.  */
    }
  return funexp;
}

/* Return true if and only if SIZE storage units (usually bytes)
   starting from address ADDR overlap with the already-clobbered argument
   area.  This function is used to determine whether we should give up
   on a sibcall.  */

static bool
mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
{
  HOST_WIDE_INT i;

  if (addr == current_function_internal_arg_pointer)
    i = 0;
  else if (GET_CODE (addr) == PLUS
           && (XEXP (addr, 0)
               == current_function_internal_arg_pointer)
           && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    i = INTVAL (XEXP (addr, 1));
  else
    return false;

#ifdef ARGS_GROW_DOWNWARD
  i = -i - size;
#endif
  if (size > 0)
    {
      unsigned HOST_WIDE_INT k;

      for (k = 0; k < size; k++)
        if (i + k < stored_args_map->n_bits
            && TEST_BIT (stored_args_map, i + k))
          return true;
    }

  return false;
}
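
/* Example (hypothetical values): with ARGS_GROW_DOWNWARD defined, an
   ADDR of internal_arg_pointer plus -8 and SIZE == 4 give
   i = -(-8) - 4 == 4, so the loop above tests bits 4..7 of
   stored_args_map; bits beyond the bitmap's size are treated as
   unclobbered.  */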

/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

static void
load_register_parameters (struct arg_data *args, int num_actuals,
                          rtx *call_fusage, int flags, int is_sibcall,
                          int *sibcall_failure)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    {
      rtx reg = ((flags & ECF_SIBCALL)
                 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
        {
          int partial = args[i].partial;
          int nregs;
          int size = 0;
          rtx before_arg = get_last_insn ();
          /* Set non-negative if we must move a word at a time, even if
             just one word (e.g., partial == 4 && mode == DFmode).  Set
             to -1 if we just use a normal move insn.  This value can be
             zero if the argument is a zero-size structure.  */
          nregs = -1;
          if (GET_CODE (reg) == PARALLEL)
            ;
          else if (partial)
            {
              gcc_assert (partial % UNITS_PER_WORD == 0);
              nregs = partial / UNITS_PER_WORD;
            }
          else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
            {
              size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
              nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
            }
          else
            size = GET_MODE_SIZE (args[i].mode);

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */

          if (GET_CODE (reg) == PARALLEL)
            emit_group_move (reg, args[i].parallel_value);

          /* If simple case, just do move.  If normal partial, store_one_arg
             has already loaded the register for us.  In all other cases,
             load the register(s) from memory.  */

          else if (nregs == -1)
            {
              emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
              /* Handle case where we have a value that needs shifting
                 up to the msb.  E.g. a QImode value and we're padding
                 upward on a BYTES_BIG_ENDIAN machine.  */
              if (size < UNITS_PER_WORD
                  && (args[i].locate.where_pad
                      == (BYTES_BIG_ENDIAN ? upward : downward)))
                {
                  rtx x;
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

                  /* Assigning REG here rather than a temp makes CALL_FUSAGE
                     report the whole reg as used.  Strictly speaking, the
                     call only uses SIZE bytes at the msb end, but it doesn't
                     seem worth generating rtl to say that.  */
                  reg = gen_rtx_REG (word_mode, REGNO (reg));
                  x = expand_shift (LSHIFT_EXPR, word_mode, reg,
                                    build_int_cst (NULL_TREE, shift),
                                    reg, 1);
                  if (x != reg)
                    emit_move_insn (reg, x);
                }
#endif
            }

          /* If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  */

          else if (args[i].n_aligned_regs != 0)
            for (j = 0; j < args[i].n_aligned_regs; j++)
              emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
                              args[i].aligned_regs[j]);

          else if (partial == 0 || args[i].pass_on_stack)
            {
              rtx mem = validize_mem (args[i].value);

              /* Check for overlap with the already-clobbered argument area.  */
              if (is_sibcall
                  && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0),
                                                           size))
                *sibcall_failure = 1;

              /* Handle a BLKmode that needs shifting.  */
              if (nregs == 1 && size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                  && args[i].locate.where_pad == downward
#else
                  && BYTES_BIG_ENDIAN
#endif
                 )
                {
                  rtx tem = operand_subword_force (mem, 0, args[i].mode);
                  rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
                  rtx x = gen_reg_rtx (word_mode);
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
                  enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
                                                        : LSHIFT_EXPR;

                  emit_move_insn (x, tem);
                  x = expand_shift (dir, word_mode, x,
                                    build_int_cst (NULL_TREE, shift),
                                    ri, 1);
                  if (x != ri)
                    emit_move_insn (ri, x);
                }
              else
                move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
            }

          /* When a parameter is a block, and perhaps in other cases, it is
             possible that it did a load from an argument slot that was
             already clobbered.  */
          if (is_sibcall
              && check_sibcall_argument_overlap (before_arg, &args[i], 0))
            *sibcall_failure = 1;

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (reg) == PARALLEL)
            use_group_regs (call_fusage, reg);
          else if (nregs == -1)
            use_reg (call_fusage, reg);
          else if (nregs > 0)
            use_regs (call_fusage, REGNO (reg), nregs);
        }
    }
}
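
/* Illustrative note (hypothetical target): with BLOCK_REG_PADDING on a
   big-endian machine where UNITS_PER_WORD == 4, a one-byte argument
   padded upward is shifted left by (4 - 1) * BITS_PER_UNIT == 24 bits
   above, so the payload lands in the most significant byte of the word
   register.  */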

/* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we compute an adjustment to the stack pointer for an
   amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
   bytes.  Then, when the arguments are pushed the stack will be perfectly
   aligned.  ARGS_SIZE->CONSTANT is set to the number of bytes that should
   be popped after the call.  Returns the adjustment.  */

static int
combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
                                           struct args_size *args_size,
                                           unsigned int preferred_unit_stack_boundary)
{
  /* The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
  HOST_WIDE_INT adjustment;
  /* The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  */
  unsigned HOST_WIDE_INT unadjusted_alignment;

  unadjusted_alignment
    = ((stack_pointer_delta + unadjusted_args_size)
       % preferred_unit_stack_boundary);

  /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */

  /* Begin by trying to pop all the bytes.  */
  unadjusted_alignment
    = (unadjusted_alignment
       - (pending_stack_adjust % preferred_unit_stack_boundary));
  adjustment = pending_stack_adjust;
  /* Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  */
  if (preferred_unit_stack_boundary > 1)
    {
      if (unadjusted_alignment > 0)
        adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
      else
        adjustment += unadjusted_alignment;
    }

  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  */
  args_size->constant
    = pending_stack_adjust - adjustment + unadjusted_args_size;

  return adjustment;
}
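
/* Worked example (hypothetical figures): with a 16-byte boundary,
   stack_pointer_delta == 0, UNADJUSTED_ARGS_SIZE == 12 and
   PENDING_STACK_ADJUST == 20, unadjusted_alignment becomes 12 - 4 == 8
   and ADJUSTMENT == 20 - (16 - 8) == 12: we pop 12 bytes now, the
   12 bytes of arguments are then pushed, and the stack ends up 16-byte
   aligned again.  ARGS_SIZE->CONSTANT becomes 20 - 12 + 12 == 20, the
   number of bytes to pop after the call.  */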

/* Scan expression X to see whether it dereferences any argument slots
   already clobbered by tail call arguments (as noted in the
   stored_args_map bitmap).  Return nonzero if X dereferences such an
   argument slot, zero otherwise.  */

static int
check_sibcall_argument_overlap_1 (rtx x)
{
  RTX_CODE code;
  int i, j;
  const char *fmt;

  if (x == NULL_RTX)
    return 0;

  code = GET_CODE (x);

  if (code == MEM)
    return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
                                                 GET_MODE_SIZE (GET_MODE (x)));

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
        {
          if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
            return 1;
        }
      else if (*fmt == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
              return 1;
        }
    }
  return 0;
}
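
/* The walk above is the usual recursive rtx traversal: MEM operands are
   checked against the clobbered-argument bitmap, while the
   GET_RTX_FORMAT string drives recursion into 'e' (rtx) and 'E'
   (rtx vector) operands.  */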

/* Scan the sequence after INSN to see whether it dereferences any
   argument slots already clobbered by tail call arguments (as noted in
   the stored_args_map bitmap).  If MARK_STORED_ARGS_MAP is nonzero, add
   the stack slots for ARG to the stored_args_map bitmap afterwards
   (when ARG is a register, MARK_STORED_ARGS_MAP should be 0).  Return
   nonzero if the sequence after INSN dereferences such argument slots,
   zero otherwise.  */

static int
check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
{
  int low, high;

  if (insn == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (insn);

  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && check_sibcall_argument_overlap_1 (PATTERN (insn)))
      break;

  if (mark_stored_args_map)
    {
#ifdef ARGS_GROW_DOWNWARD
      low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
#else
      low = arg->locate.slot_offset.constant;
#endif

      for (high = low + arg->locate.size.constant; low < high; low++)
        SET_BIT (stored_args_map, low);
    }
  return insn != NULL_RTX;
}
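
/* Example (hypothetical values): for an argument with slot offset 8 and
   size 4, with arguments growing upward, the marking loop above sets
   bits 8..11 of stored_args_map, so later arguments that read those
   bytes will be flagged as overlapping.  */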

/* Given that a function returns a value of mode MODE at the most
   significant end of hard register VALUE, shift VALUE left or right
   as specified by LEFT_P.  Return true if some action was needed.  */

bool
shift_return_value (enum machine_mode mode, bool left_p, rtx value)
{
  HOST_WIDE_INT shift;

  gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
  shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
  if (shift == 0)
    return false;

  /* Use ashr rather than lshr for right shifts.  This is for the benefit
     of the MIPS port, which requires SImode values to be sign-extended
     when stored in 64-bit registers.  */
  if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
                           value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
    gcc_unreachable ();
  return true;
}
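
/* Example (hypothetical target): an SImode value returned at the most
   significant end of a 64-bit register gives shift == 64 - 32 == 32;
   with LEFT_P false the register is shifted right arithmetically by 32
   bits to move the value down to the least significant end.  */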

/* Generate all the code for a function call
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */

rtx
expand_call (tree exp, rtx target, int ignore)
{
  /* Nonzero if we are currently expanding a call.  */
  static int currently_expanding_call = 0;

  /* List of actual parameters.  */
  tree actparms = TREE_OPERAND (exp, 1);
  /* RTX for the function to be called.  */
  rtx funexp;
  /* Sequence of insns to perform a normal "call".  */
  rtx normal_call_insns = NULL_RTX;
  /* Sequence of insns to perform a tail "call".  */
  rtx tail_call_insns = NULL_RTX;
  /* Data type of the function.  */
  tree funtype;
  tree type_arg_types;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  /* The type of the function being called.  */
  tree fntype;
  bool try_tail_call = CALL_EXPR_TAILCALL (exp);
  int pass;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Address where we should return a BLKmode value;
     0 if the value is not returned in memory.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  HOST_WIDE_INT struct_value_size = 0;
  /* Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  int pcc_struct_value = 0;
  rtx struct_value = 0;

  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  struct args_size adjusted_args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  int unadjusted_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;
  /* Nonzero if this is an indirect function call.  */

  /* Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */

  int must_preallocate = !PUSH_ARGS;

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;

  /* Mask of ECF_ flags.  */
  int flags = 0;
#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save, high_to_save;
  rtx save_area = 0;             /* Place that it is saved */
#endif

  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  char *stack_usage_map_buf = NULL;

  int old_stack_allocated;

  /* State variables to track stack modifications.  */
  rtx old_stack_level = 0;
  int old_stack_arg_under_construction = 0;
  int old_pending_adj = 0;
  int old_inhibit_defer_pop = inhibit_defer_pop;

  /* Some stack pointer alterations we make are performed via
     allocate_dynamic_stack_space.  This modifies the stack_pointer_delta,
     which we then also need to save/restore along the way.  */
  int old_stack_pointer_delta = 0;

  rtx call_fusage;
  tree p = TREE_OPERAND (exp, 0);
  tree addr = TREE_OPERAND (exp, 0);
  int i;
  /* The alignment of the stack, in bits.  */
  unsigned HOST_WIDE_INT preferred_stack_boundary;
  /* The alignment of the stack, in bytes.  */
  unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
  /* The static chain value to use for this call.  */
  rtx static_chain_value;
  /* See if this is a "nothrow" function call.  */
  if (TREE_NOTHROW (exp))
    flags |= ECF_NOTHROW;

  /* See if we can find a DECL-node for the actual function, and get the
     function attributes (flags) from the function decl or type node.  */
  fndecl = get_callee_fndecl (exp);
  if (fndecl)
    {
      fntype = TREE_TYPE (fndecl);
      flags |= flags_from_decl_or_type (fndecl);
    }
  else
    {
      fntype = TREE_TYPE (TREE_TYPE (p));
      flags |= flags_from_decl_or_type (fntype);
    }

  struct_value = targetm.calls.struct_value_rtx (fntype, 0);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    warning (OPT_Waggregate_return, "function call has aggregate value");

  /* If the result of a pure or const function call is ignored (or void),
     and none of its arguments are volatile, we can avoid expanding the
     call and just evaluate the arguments for side-effects.  */
  if ((flags & (ECF_CONST | ECF_PURE))
      && (ignore || target == const0_rtx
          || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
    {
      bool volatilep = false;
      tree arg;

      for (arg = actparms; arg; arg = TREE_CHAIN (arg))
        if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
          {
            volatilep = true;
            break;
          }

      if (! volatilep)
        {
          for (arg = actparms; arg; arg = TREE_CHAIN (arg))
            expand_expr (TREE_VALUE (arg), const0_rtx,
                         VOIDmode, EXPAND_NORMAL);
          return const0_rtx;
        }
    }
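
  /* Example (hypothetical call): for a pure function f, the statement
     "f (x);" ignores the result, so the code above merely expands "x"
     for its side effects and returns const0_rtx without emitting the
     call.  */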

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif

#ifndef OUTGOING_REG_PARM_STACK_SPACE
  if (reg_parm_stack_space > 0 && PUSH_ARGS)
    must_preallocate = 1;
#endif

  /* Set up a place to return a structure.  */

  /* Cater to broken compilers.  */
  if (aggregate_value_p (exp, fndecl))
    {
      /* This call returns a big structure.  */
      flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

#ifdef PCC_STATIC_STRUCT_RETURN
      {
        pcc_struct_value = 1;
      }
#else /* not PCC_STATIC_STRUCT_RETURN */
      {
        struct_value_size = int_size_in_bytes (TREE_TYPE (exp));

        if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
          structure_value_addr = XEXP (target, 0);
        else
          {
            /* For variable-sized objects, we must be called with a target
               specified.  If we were to allocate space on the stack here,
               we would have no way of knowing when to free it.  */
            rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);

            mark_temp_addr_taken (d);
            structure_value_addr = XEXP (d, 0);
            target = 0;
          }
      }
#endif /* not PCC_STATIC_STRUCT_RETURN */
    }

  /* Figure out the amount to which the stack should be aligned.  */
  preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  if (fndecl)
    {
      struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
      if (i && i->preferred_incoming_stack_boundary)
        preferred_stack_boundary = i->preferred_incoming_stack_boundary;
    }

  /* Operand 0 is a pointer-to-function; get the type of the function.  */
  funtype = TREE_TYPE (addr);
  gcc_assert (POINTER_TYPE_P (funtype));
  funtype = TREE_TYPE (funtype);

  /* Munge the tree to split complex arguments into their imaginary
     and real parts.  */
  if (targetm.calls.split_complex_arg)
    {
      type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
      actparms = split_complex_values (actparms);
    }
  else
    type_arg_types = TYPE_ARG_TYPES (funtype);

  if (flags & ECF_MAY_BE_ALLOCA)
    current_function_calls_alloca = 1;

  /* If struct_value_rtx is 0, it means pass the address
     as if it were an extra parameter.  */
  if (structure_value_addr && struct_value == 0)
    {
      /* If structure_value_addr is a REG other than
         virtual_outgoing_args_rtx, we can always use it.  If it
         is not a REG, we must always copy it into a register.
         If it is virtual_outgoing_args_rtx, we must copy it to another
         register in some cases.  */
      rtx temp = (!REG_P (structure_value_addr)
                  || (ACCUMULATE_OUTGOING_ARGS
                      && stack_arg_under_construction
                      && structure_value_addr == virtual_outgoing_args_rtx)
                  ? copy_addr_to_reg (convert_memory_address
                                      (Pmode, structure_value_addr))
                  : structure_value_addr);

      actparms
        = tree_cons (error_mark_node,
                     make_tree (build_pointer_type (TREE_TYPE (funtype)),
                                temp),
                     actparms);
      structure_value_addr_parm = 1;
    }

  /* Count the arguments and set NUM_ACTUALS.  */
  for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
    num_actuals++;

  /* Compute number of named args.
     First, do a raw count of the args for INIT_CUMULATIVE_ARGS.  */

  if (type_arg_types != 0)
    n_named_args
      = (list_length (type_arg_types)
         /* Count the struct value address, if it is passed as a parm.  */
         + structure_value_addr_parm);
  else
    /* If we know nothing, treat all args as named.  */
    n_named_args = num_actuals;

  /* Start updating where the next arg would go.

     On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  The fourth argument in
     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
     or not.  */
  INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);

  /* Now possibly adjust the number of named args.
     Normally, don't include the last named arg if anonymous args follow.
     We do include the last named arg if
     targetm.calls.strict_argument_naming() returns nonzero.
     (If no anonymous args follow, the result of list_length is actually
     one too large.  This is harmless.)

     If targetm.calls.pretend_outgoing_varargs_named() returns
     nonzero, and targetm.calls.strict_argument_naming() returns zero,
     this machine will be able to place unnamed args that were passed
     in registers into the stack.  So treat all args as named.  This
     allows the insns emitted for a specific argument list to be
     independent of the function declaration.

     If targetm.calls.pretend_outgoing_varargs_named() returns zero,
     we do not have any reliable way to pass unnamed args in
     registers, so we must force them into memory.  */

  if (type_arg_types != 0
      && targetm.calls.strict_argument_naming (&args_so_far))
    ;
  else if (type_arg_types != 0
           && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
    /* Don't include the last named arg.  */
    --n_named_args;
  else
    /* Treat all args as named.  */
    n_named_args = num_actuals;
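
  /* Example: for a variadic declaration "int f (int, ...)" and a call
     "f (a, b, c)", the raw count gives n_named_args == 1.  Under strict
     argument naming that count stands; if unnamed args cannot be
     pretended named, the last named arg is dropped (n_named_args == 0);
     otherwise all three arguments are treated as named.  */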

  /* Make a vector to hold all the information about each arg.  */
  args = alloca (num_actuals * sizeof (struct arg_data));
  memset (args, 0, num_actuals * sizeof (struct arg_data));

  /* Build up entries in the ARGS array, compute the size of the
     arguments into ARGS_SIZE, etc.  */
  initialize_argument_information (num_actuals, args, &args_size,
                                   n_named_args, actparms, fndecl,
                                   &args_so_far, reg_parm_stack_space,
                                   &old_stack_level, &old_pending_adj,
                                   &must_preallocate, &flags,
                                   &try_tail_call, CALL_FROM_THUNK_P (exp));

  if (args_size.var)
    {
      /* If this function requires a variable-sized argument list, don't
         try to make a cse'able block for this call.  We may be able to
         do this eventually, but it is too complicated to keep track of
         what insns go in the cse'able block and which don't.  */

      flags &= ~ECF_LIBCALL_BLOCK;
      must_preallocate = 1;
    }

  /* Now make final decision about preallocating stack space.  */
  must_preallocate = finalize_must_preallocate (must_preallocate,
                                                num_actuals, args,
                                                &args_size);

  /* If the structure value address will reference the stack pointer, we
     must stabilize it.  We don't need to do this if we know that we are
     not going to adjust the stack pointer in processing this call.  */

  if (structure_value_addr
      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
          || reg_mentioned_p (virtual_outgoing_args_rtx,
                              structure_value_addr))
      && (args_size.var
          || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
    structure_value_addr = copy_to_reg (structure_value_addr);

  /* Tail calls can make things harder to debug, and we've traditionally
     pushed these optimizations into -O2.  Don't try if we're already
     expanding a call, as that means we're an argument.  Don't try if
     there are cleanups, as we know there's code to follow the call.  */

  if (currently_expanding_call++ != 0
      || !flag_optimize_sibling_calls
      || args_size.var
      || lookup_stmt_eh_region (exp) >= 0)
    try_tail_call = 0;

  /* Remaining reasons for tail call optimization to fail.  */
  if (
#ifdef HAVE_sibcall_epilogue
      !HAVE_sibcall_epilogue
#else
      1
#endif
      || !try_tail_call
      /* Doing sibling call optimization needs some work, since
         structure_value_addr can be allocated on the stack.
         It does not seem worth the effort since few optimizable
         sibling calls will return a structure.  */
      || structure_value_addr != NULL_RTX
      /* Check whether the target is able to optimize the call
         into a sibcall.  */
      || !targetm.function_ok_for_sibcall (fndecl, exp)
      /* Functions that do not return exactly once may not be sibcall
         optimized.  */
      || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
      || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
      /* If the called function is nested in the current one, it might access
         some of the caller's arguments, but could clobber them beforehand if
         the argument areas are shared.  */
      || (fndecl && decl_function_context (fndecl) == current_function_decl)
      /* If this function requires more stack slots than the current
         function, we cannot change it into a sibling call.
         current_function_pretend_args_size is not part of the
         stack allocated by our caller.  */
      || args_size.constant > (current_function_args_size
                               - current_function_pretend_args_size)
      /* If the callee pops its own arguments, then it must pop exactly
         the same number of arguments as the current function.  */
      || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
          != RETURN_POPS_ARGS (current_function_decl,
                               TREE_TYPE (current_function_decl),
                               current_function_args_size))
      || !lang_hooks.decls.ok_for_sibcall (fndecl))
    try_tail_call = 0;
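
  /* Example (hypothetical sizes): if the current function received 8
     bytes of stack arguments but the callee needs 24, then
     args_size.constant exceeds the space our own caller allocated, one
     of the tests above succeeds, and the call is expanded as a normal
     call instead of a sibcall.  */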

  /* Ensure current function's preferred stack boundary is at least
     what we need.  We don't have to increase alignment for recursive
     functions.  */
  if (cfun->preferred_stack_boundary < preferred_stack_boundary
      && fndecl != current_function_decl)
    cfun->preferred_stack_boundary = preferred_stack_boundary;
  if (fndecl == current_function_decl)
    cfun->recursive_call_emit = true;

  preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;

  /* We want to make two insn chains; one for a sibling call, the other
     for a normal call.  We will select one of the two chains after
     initial RTL generation is complete.  */
  for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
    {
      int sibcall_failure = 0;
      /* We want to emit any pending stack adjustments before the tail
         recursion "call".  That way we know any adjustment after the tail
         recursion call can be ignored if we indeed use the tail
         call expansion.  */
      int save_pending_stack_adjust = 0;
      int save_stack_pointer_delta = 0;
      rtx insns;
      rtx before_call, next_arg_reg;

      if (pass == 0)
        {
          /* State variables we need to save and restore between
             iterations.  */
          save_pending_stack_adjust = pending_stack_adjust;
          save_stack_pointer_delta = stack_pointer_delta;
        }
      if (pass)
        flags &= ~ECF_SIBCALL;
      else
        flags |= ECF_SIBCALL;

      /* Other state variables that we must reinitialize each time
         through the loop (that are not initialized by the loop itself).  */
      argblock = 0;
      call_fusage = 0;

      /* Start a new sequence for the normal call case.

         From this point on, if the sibling call fails, we want to set
         sibcall_failure instead of continuing the loop.  */
      start_sequence ();

      /* Don't let pending stack adjusts add up to too much.
         Also, do all pending adjustments now if there is any chance
         this might be a call to alloca or if we are expanding a sibling
         call sequence or if we are calling a function that is to return
         with stack pointer depressed.
         Also do the adjustments before a throwing call, otherwise
         exception handling can fail; PR 19225. */
      if (pending_stack_adjust >= 32
          || (pending_stack_adjust > 0
              && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
          || (pending_stack_adjust > 0
              && flag_exceptions && !(flags & ECF_NOTHROW))
          || pass == 0)
        do_pending_stack_adjust ();

      /* When calling a const function, we must pop the stack args right away,
         so that the pop is deleted or moved with the call.  */
      if (pass && (flags & ECF_LIBCALL_BLOCK))
        NO_DEFER_POP;

      /* Precompute any arguments as needed.  */
      if (pass)
        precompute_arguments (flags, num_actuals, args);

      /* Now we are about to start emitting insns that can be deleted
         if a libcall is deleted.  */
      if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
        start_sequence ();

      if (pass == 0 && cfun->stack_protect_guard)
        stack_protect_epilogue ();

      adjusted_args_size = args_size;
      /* Compute the actual size of the argument block required.  The variable
         and constant sizes must be combined, the size may have to be rounded,
         and there may be a minimum required size.  When generating a sibcall
         pattern, do not round up, since we'll be re-using whatever space our
         caller provided.  */
      unadjusted_args_size
        = compute_argument_block_size (reg_parm_stack_space,
                                       &adjusted_args_size,
                                       (pass == 0 ? 0
                                        : preferred_stack_boundary));

      old_stack_allocated = stack_pointer_delta - pending_stack_adjust;

      /* The argument block when performing a sibling call is the
         incoming argument block.  */
      if (pass == 0)
        {
          argblock = virtual_incoming_args_rtx;
          argblock
#ifdef STACK_GROWS_DOWNWARD
            = plus_constant (argblock, current_function_pretend_args_size);
#else
            = plus_constant (argblock, -current_function_pretend_args_size);
#endif
          stored_args_map = sbitmap_alloc (args_size.constant);
          sbitmap_zero (stored_args_map);
        }

      /* If we have no actual push instructions, or shouldn't use them,
         make space for all args right now.  */
      else if (adjusted_args_size.var != 0)
        {
          if (old_stack_level == 0)
            {
              emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
              old_stack_pointer_delta = stack_pointer_delta;
              old_pending_adj = pending_stack_adjust;
              pending_stack_adjust = 0;
              /* stack_arg_under_construction says whether a stack arg is
                 being constructed at the old stack level.  Pushing the stack
                 gets a clean outgoing argument block.  */
              old_stack_arg_under_construction = stack_arg_under_construction;
              stack_arg_under_construction = 0;
            }
          argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
        }
      else
        {
          /* Note that we must go through the motions of allocating an argument
             block even if the size is zero because we may be storing args
             in the area reserved for register arguments, which may be part of
             the stack frame.  */

          int needed = adjusted_args_size.constant;

          /* Store the maximum argument space used.  It will be pushed by
             the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
             checking).  */

          if (needed > current_function_outgoing_args_size)
            current_function_outgoing_args_size = needed;

          if (must_preallocate)
            {
              if (ACCUMULATE_OUTGOING_ARGS)
                {
                  /* Since the stack pointer will never be pushed, it is
                     possible for the evaluation of a parm to clobber
                     something we have already written to the stack.
                     Since most function calls on RISC machines do not use
                     the stack, this is uncommon, but must work correctly.

                     Therefore, we save any area of the stack that was already
                     written and that we are using.  Here we set up to do this
                     by making a new stack usage map from the old one.  The
                     actual save will be done by store_one_arg.

                     Another approach might be to try to reorder the argument
                     evaluations to avoid this conflicting stack usage.  */

#ifndef OUTGOING_REG_PARM_STACK_SPACE
                  /* Since we will be writing into the entire argument area,
                     the map must be allocated for its entire size, not just
                     the part that is the responsibility of the caller.  */
                  needed += reg_parm_stack_space;
#endif

#ifdef ARGS_GROW_DOWNWARD
                  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                                     needed + 1);
#else
                  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                                     needed);
#endif
                  if (stack_usage_map_buf)
                    free (stack_usage_map_buf);
                  stack_usage_map_buf = xmalloc (highest_outgoing_arg_in_use);
                  stack_usage_map = stack_usage_map_buf;
 

                  if (initial_highest_arg_in_use)
                    memcpy (stack_usage_map, initial_stack_usage_map,
                            initial_highest_arg_in_use);

                  if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
                    memset (&stack_usage_map[initial_highest_arg_in_use], 0,
                           (highest_outgoing_arg_in_use
                            - initial_highest_arg_in_use));
                  needed = 0;

                  /* The address of the outgoing argument list must not be
                     copied to a register here, because argblock would be left
                     pointing to the wrong place after the call to
                     allocate_dynamic_stack_space below.  */

                  argblock = virtual_outgoing_args_rtx;
                }
              else
                {
                  if (inhibit_defer_pop == 0)
                    {
                      /* Try to reuse some or all of the pending_stack_adjust
                         to get this space.  */
                      needed
                        = (combine_pending_stack_adjustment_and_call
                           (unadjusted_args_size,
                            &adjusted_args_size,
                            preferred_unit_stack_boundary));

                      /* combine_pending_stack_adjustment_and_call computes
                         an adjustment before the arguments are allocated.
                         Account for them and see whether or not the stack
                         needs to go up or down.  */
                      needed = unadjusted_args_size - needed;

                      if (needed < 0)
                        {
                          /* We're releasing stack space.  */
                          /* ??? We can avoid any adjustment at all if we're
                             already aligned.  FIXME.  */
                          pending_stack_adjust = -needed;
                          do_pending_stack_adjust ();
                          needed = 0;
                        }
                      else
                        /* We need to allocate space.  We'll do that in
                           push_block below.  */
                        pending_stack_adjust = 0;
                    }

                  /* Special case this because overhead of `push_block' in
                     this case is non-trivial.  */
                  if (needed == 0)
                    argblock = virtual_outgoing_args_rtx;
                  else
                    {
                      argblock = push_block (GEN_INT (needed), 0, 0);
#ifdef ARGS_GROW_DOWNWARD
                      argblock = plus_constant (argblock, needed);
#endif
                    }

                  /* We only really need to call `copy_to_reg' in the case
                     where push insns are going to be used to pass ARGBLOCK
                     to a function call in ARGS.  In that case, the stack
                     pointer changes value from the allocation point to the
                     call point, and hence the value of
                     VIRTUAL_OUTGOING_ARGS_RTX changes as well.  But might
                     as well always do it.  */
                  argblock = copy_to_reg (argblock);
                }
            }
        }

      if (ACCUMULATE_OUTGOING_ARGS)
        {
          /* The save/restore code in store_one_arg handles all
             cases except one: a constructor call (including a C
             function returning a BLKmode struct) to initialize
             an argument.  */
          if (stack_arg_under_construction)
            {
#ifndef OUTGOING_REG_PARM_STACK_SPACE
              rtx push_size = GEN_INT (reg_parm_stack_space
                                       + adjusted_args_size.constant);
#else
              rtx push_size = GEN_INT (adjusted_args_size.constant);
#endif
              if (old_stack_level == 0)
                {
                  emit_stack_save (SAVE_BLOCK, &old_stack_level,
                                   NULL_RTX);
                  old_stack_pointer_delta = stack_pointer_delta;
                  old_pending_adj = pending_stack_adjust;
                  pending_stack_adjust = 0;
                  /* stack_arg_under_construction says whether a stack
                     arg is being constructed at the old stack level.
                     Pushing the stack gets a clean outgoing argument
                     block.  */
                  old_stack_arg_under_construction
                    = stack_arg_under_construction;
                  stack_arg_under_construction = 0;
                  /* Make a new map for the new argument list.  */
                  if (stack_usage_map_buf)
                    free (stack_usage_map_buf);
                  stack_usage_map_buf = xmalloc (highest_outgoing_arg_in_use);
                  stack_usage_map = stack_usage_map_buf;
                  memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
                  highest_outgoing_arg_in_use = 0;
                }
              allocate_dynamic_stack_space (push_size, NULL_RTX,
                                            BITS_PER_UNIT);
            }

          /* If argument evaluation might modify the stack pointer,
             copy the address of the argument list to a register.  */
          for (i = 0; i < num_actuals; i++)
            if (args[i].pass_on_stack)
              {
                argblock = copy_addr_to_reg (argblock);
                break;
              }
        }

      compute_argument_addresses (args, argblock, num_actuals);

      /* If we push args individually in reverse order, perform stack alignment
         before the first push (the last arg).  */
      if (PUSH_ARGS_REVERSED && argblock == 0
          && adjusted_args_size.constant != unadjusted_args_size)
        {
          /* When the stack adjustment is pending, we get better code
             by combining the adjustments.  */
          if (pending_stack_adjust
              && ! (flags & ECF_LIBCALL_BLOCK)
              && ! inhibit_defer_pop)
            {
              pending_stack_adjust
                = (combine_pending_stack_adjustment_and_call
                   (unadjusted_args_size,
                    &adjusted_args_size,
                    preferred_unit_stack_boundary));
              do_pending_stack_adjust ();
            }
          else if (argblock == 0)
            anti_adjust_stack (GEN_INT (adjusted_args_size.constant
                                        - unadjusted_args_size));
        }
      /* Now that the stack is properly aligned, pops can't safely
         be deferred during the evaluation of the arguments.  */
      NO_DEFER_POP;

      funexp = rtx_for_function_call (fndecl, addr);

      /* Figure out the register where the value, if any, will come back.  */
      valreg = 0;
      if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
          && ! structure_value_addr)
        {
          if (pcc_struct_value)
            valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
                                          fndecl, NULL, (pass == 0));
          else
            valreg = hard_function_value (TREE_TYPE (exp), fndecl, fntype,
                                          (pass == 0));
        }

      /* Precompute all register parameters.  It isn't safe to compute anything
         once we have started filling any specific hard regs.  */
      precompute_register_parameters (num_actuals, args, &reg_parm_seen);

      if (TREE_OPERAND (exp, 2))
        static_chain_value = expand_expr (TREE_OPERAND (exp, 2),
                                          NULL_RTX, VOIDmode, 0);
      else
        static_chain_value = 0;
 
#ifdef REG_PARM_STACK_SPACE
      /* Save the fixed argument area if it's part of the caller's frame and
         is clobbered by argument setup for this call.  */
      if (ACCUMULATE_OUTGOING_ARGS && pass)
        save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
                                              &low_to_save, &high_to_save);
#endif

      /* Now store (and compute if necessary) all non-register parms.
         These come before register parms, since they can require block-moves,
         which could clobber the registers used for register parms.
         Parms which have partial registers are not stored here,
         but we do preallocate space here if they want that.  */

      for (i = 0; i < num_actuals; i++)
        if (args[i].reg == 0 || args[i].pass_on_stack)
          {
            rtx before_arg = get_last_insn ();

            if (store_one_arg (&args[i], argblock, flags,
                               adjusted_args_size.var != 0,
                               reg_parm_stack_space)
                || (pass == 0
                    && check_sibcall_argument_overlap (before_arg,
                                                       &args[i], 1)))
              sibcall_failure = 1;

            if (flags & ECF_CONST
                && args[i].stack
                && args[i].value == args[i].stack)
              call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                               gen_rtx_USE (VOIDmode,
                                                            args[i].value),
                                               call_fusage);
          }

      /* If we have a parm that is passed in registers but not in memory
         and whose alignment does not permit a direct copy into registers,
         make a group of pseudos that correspond to each register that we
         will later fill.  */
      if (STRICT_ALIGNMENT)
        store_unaligned_arguments_into_pseudos (args, num_actuals);

      /* Now store any partially-in-registers parm.
         This is the last place a block-move can happen.  */
      if (reg_parm_seen)
        for (i = 0; i < num_actuals; i++)
          if (args[i].partial != 0 && ! args[i].pass_on_stack)
            {
              rtx before_arg = get_last_insn ();

              if (store_one_arg (&args[i], argblock, flags,
                                 adjusted_args_size.var != 0,
                                 reg_parm_stack_space)
                  || (pass == 0
                      && check_sibcall_argument_overlap (before_arg,
                                                         &args[i], 1)))
                sibcall_failure = 1;
            }

      /* If we pushed args in forward order, perform stack alignment
         after pushing the last arg.  */
      if (!PUSH_ARGS_REVERSED && argblock == 0)
        anti_adjust_stack (GEN_INT (adjusted_args_size.constant
                                    - unadjusted_args_size));

      /* If register arguments require space on the stack and stack space
         was not preallocated, allocate stack space here for arguments
         passed in registers.  */
#ifdef OUTGOING_REG_PARM_STACK_SPACE
      if (!ACCUMULATE_OUTGOING_ARGS
          && must_preallocate == 0 && reg_parm_stack_space > 0)
        anti_adjust_stack (GEN_INT (reg_parm_stack_space));
#endif

      /* Pass the function the address in which to return a
         structure value.  */
      if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
        {
          structure_value_addr
            = convert_memory_address (Pmode, structure_value_addr);
          emit_move_insn (struct_value,
                          force_reg (Pmode,
                                     force_operand (structure_value_addr,
                                                    NULL_RTX)));

          if (REG_P (struct_value))
            use_reg (&call_fusage, struct_value);
        }

      funexp = prepare_call_address (funexp, static_chain_value,
                                     &call_fusage, reg_parm_seen, pass == 0);

      load_register_parameters (args, num_actuals, &call_fusage, flags,
                                pass == 0, &sibcall_failure);

      /* Save a pointer to the last insn before the call, so that we can
         later safely search backwards to find the CALL_INSN.  */
      before_call = get_last_insn ();

      /* Set up next argument register.  For sibling calls on machines
         with register windows this should be the incoming register.  */
#ifdef FUNCTION_INCOMING_ARG
      if (pass == 0)
        next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
                                              void_type_node, 1);
      else
#endif
        next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
                                     void_type_node, 1);

      /* All arguments and registers used for the call must be set up by
         now!  */

      /* Stack must be properly aligned now.  */
      gcc_assert (!pass
                  || !(stack_pointer_delta % preferred_unit_stack_boundary));

      /* Generate the actual call instruction.  */
      emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
                   adjusted_args_size.constant, struct_value_size,
                   next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
                   flags, & args_so_far);

      /* If a non-BLKmode value is returned at the most significant end
         of a register, shift the register right by the appropriate amount
         and update VALREG accordingly.  BLKmode values are handled by the
         group load/store machinery below.  */
      if (!structure_value_addr
          && !pcc_struct_value
          && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
          && targetm.calls.return_in_msb (TREE_TYPE (exp)))
        {
          if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
            sibcall_failure = 1;
          valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
        }

      /* If call is cse'able, make appropriate pair of reg-notes around it.
         Test valreg so we don't crash; may safely ignore `const'
         if return type is void.  Disable for PARALLEL return values, because
         we have no way to move such values into a pseudo register.  */
      if (pass && (flags & ECF_LIBCALL_BLOCK))
        {
          rtx insns;
          rtx insn;
          bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;

          insns = get_insns ();

          /* Expansion of block moves may have introduced a loop that is
             not allowed inside a libcall block.  */
          for (insn = insns; insn; insn = NEXT_INSN (insn))
            if (JUMP_P (insn))
              failed = true;

          if (failed)
            {
              end_sequence ();
              emit_insn (insns);
            }
          else
2715
            {
2716
              rtx note = 0;
2717
              rtx temp = gen_reg_rtx (GET_MODE (valreg));
2718
 
2719
              /* Mark the return value as a pointer if needed.  */
2720
              if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2721
                mark_reg_pointer (temp,
2722
                                  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2723
 
2724
              end_sequence ();
2725
              if (flag_unsafe_math_optimizations
2726
                  && fndecl
2727
                  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2728
                  && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
2729
                      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
2730
                      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
2731
                note = gen_rtx_fmt_e (SQRT,
2732
                                      GET_MODE (temp),
2733
                                      args[0].initial_value);
2734
              else
2735
                {
2736
                  /* Construct an "equal form" for the value which
2737
                     mentions all the arguments in order as well as
2738
                     the function name.  */
2739
                  for (i = 0; i < num_actuals; i++)
2740
                    note = gen_rtx_EXPR_LIST (VOIDmode,
2741
                                              args[i].initial_value, note);
2742
                  note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2743
 
2744
                  if (flags & ECF_PURE)
2745
                    note = gen_rtx_EXPR_LIST (VOIDmode,
2746
                        gen_rtx_USE (VOIDmode,
2747
                                     gen_rtx_MEM (BLKmode,
2748
                                                  gen_rtx_SCRATCH (VOIDmode))),
2749
                        note);
2750
                }
2751
              emit_libcall_block (insns, temp, valreg, note);
2752
 
2753
              valreg = temp;
2754
            }
2755
        }
      else if (pass && (flags & ECF_MALLOC))
        {
          rtx temp = gen_reg_rtx (GET_MODE (valreg));
          rtx last, insns;

          /* The return value from a malloc-like function is a pointer.  */
          if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
            mark_reg_pointer (temp, BIGGEST_ALIGNMENT);

          emit_move_insn (temp, valreg);

          /* The return value from a malloc-like function cannot alias
             anything else.  */
          last = get_last_insn ();
          REG_NOTES (last) =
            gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));

          /* Write out the sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          valreg = temp;
        }
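
      /* Illustrative sketch (an assumption, not part of the original
         source): for `p = malloc (n)' the code above leaves the result in
         a fresh pseudo TEMP whose defining move carries a REG_NOALIAS
         note, roughly

             (insn (set (reg TEMP) (reg <valreg>))
                   (expr_list:REG_NOALIAS (reg TEMP) ...))

         so alias analysis may treat the pointed-to storage as a newly
         allocated object distinct from all pre-existing memory.  */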

      /* For calls to `setjmp', etc., inform flow.c it should complain
         if nonvolatile values are live.  For functions that cannot return,
         inform flow that control does not fall through.  */

      if ((flags & ECF_NORETURN) || pass == 0)
        {
          /* The barrier must be emitted
             immediately after the CALL_INSN.  Some ports emit more
             than just a CALL_INSN above, so we must search for it here.  */

          rtx last = get_last_insn ();
          while (!CALL_P (last))
            {
              last = PREV_INSN (last);
              /* There was no CALL_INSN?  */
              gcc_assert (last != before_call);
            }

          emit_barrier_after (last);

          /* Stack adjustments after a noreturn call are dead code.
             However, when NO_DEFER_POP is in effect, we must preserve
             stack_pointer_delta.  */
          if (inhibit_defer_pop == 0)
            {
              stack_pointer_delta = old_stack_allocated;
              pending_stack_adjust = 0;
            }
        }

      /* If value type not void, return an rtx for the value.  */

      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
          || ignore)
        target = const0_rtx;
      else if (structure_value_addr)
        {
          if (target == 0 || !MEM_P (target))
            {
              target
                = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
                               memory_address (TYPE_MODE (TREE_TYPE (exp)),
                                               structure_value_addr));
              set_mem_attributes (target, exp, 1);
            }
        }
      else if (pcc_struct_value)
        {
          /* This is the special C++ case where we need to
             know what the true target was.  We take care to
             never use this value more than once in one expression.  */
          target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
                                copy_to_reg (valreg));
          set_mem_attributes (target, exp, 1);
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (valreg) == PARALLEL)
        {
          if (target == 0)
            {
              /* This will only be assigned once, so it can be readonly.  */
              tree nt = build_qualified_type (TREE_TYPE (exp),
                                              (TYPE_QUALS (TREE_TYPE (exp))
                                               | TYPE_QUAL_CONST));

              target = assign_temp (nt, 0, 1, 1);
            }

          if (! rtx_equal_p (target, valreg))
            emit_group_store (target, valreg, TREE_TYPE (exp),
                              int_size_in_bytes (TREE_TYPE (exp)));

          /* We cannot support sibling calls for this case.  */
          sibcall_failure = 1;
        }
      else if (target
               && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
               && GET_MODE (target) == GET_MODE (valreg))
        {
          bool may_overlap = false;

          /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
             reg to a plain register.  */
          if (REG_P (valreg)
              && HARD_REGISTER_P (valreg)
              && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (valreg)))
              && !(REG_P (target) && !HARD_REGISTER_P (target)))
            valreg = copy_to_reg (valreg);

          /* If TARGET is a MEM in the argument area, and we have
             saved part of the argument area, then we can't store
             directly into TARGET as it may get overwritten when we
             restore the argument save area below.  Don't work too
             hard though and simply force TARGET to a register if it
             is a MEM; the optimizer is quite likely to sort it out.  */
          if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
            for (i = 0; i < num_actuals; i++)
              if (args[i].save_area)
                {
                  may_overlap = true;
                  break;
                }

          if (may_overlap)
            target = copy_to_reg (valreg);
          else
            {
              /* TARGET and VALREG cannot be equal at this point
                 because the latter would not have
                 REG_FUNCTION_VALUE_P true, while the former would if
                 it were referring to the same register.

                 If they refer to the same register, this move will be
                 a no-op, except when function inlining is being
                 done.  */
              emit_move_insn (target, valreg);

              /* If we are setting a MEM, this code must be executed.
                 Since it is emitted after the call insn, sibcall
                 optimization cannot be performed in that case.  */
              if (MEM_P (target))
                sibcall_failure = 1;
            }
        }
      else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
        {
          target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));

          /* We cannot support sibling calls for this case.  */
          sibcall_failure = 1;
        }
      else
        target = copy_to_reg (valreg);

      if (targetm.calls.promote_function_return (funtype))
        {
          /* If we promoted this return value, make the proper SUBREG.
             TARGET might be const0_rtx here, so be careful.  */
          if (REG_P (target)
              && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
              && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
            {
              tree type = TREE_TYPE (exp);
              int unsignedp = TYPE_UNSIGNED (type);
              int offset = 0;
              enum machine_mode pmode;

              pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
              /* If we don't promote as expected, something is wrong.  */
              gcc_assert (GET_MODE (target) == pmode);

              if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
                  && (GET_MODE_SIZE (GET_MODE (target))
                      > GET_MODE_SIZE (TYPE_MODE (type))))
                {
                  offset = GET_MODE_SIZE (GET_MODE (target))
                    - GET_MODE_SIZE (TYPE_MODE (type));
                  if (! BYTES_BIG_ENDIAN)
                    offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
                  else if (! WORDS_BIG_ENDIAN)
                    offset %= UNITS_PER_WORD;
                }
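
              /* Worked example (illustrative, not from the original
                 source): an SImode value promoted to a DImode register on
                 a 64-bit big-endian target gives offset = 8 - 4 = 4, so
                 the SUBREG created below names the least significant half
                 of the register.  */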
              target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
              SUBREG_PROMOTED_VAR_P (target) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
            }
        }

      /* If the size of the args is variable or this was a constructor call
         for a stack argument, restore the saved stack-pointer value.  */

      if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
        {
          emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
          stack_pointer_delta = old_stack_pointer_delta;
          pending_stack_adjust = old_pending_adj;
          old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
          stack_arg_under_construction = old_stack_arg_under_construction;
          highest_outgoing_arg_in_use = initial_highest_arg_in_use;
          stack_usage_map = initial_stack_usage_map;
          sibcall_failure = 1;
        }
      else if (ACCUMULATE_OUTGOING_ARGS && pass)
        {
#ifdef REG_PARM_STACK_SPACE
          if (save_area)
            restore_fixed_argument_area (save_area, argblock,
                                         high_to_save, low_to_save);
#endif

          /* If we saved any argument areas, restore them.  */
          for (i = 0; i < num_actuals; i++)
            if (args[i].save_area)
              {
                enum machine_mode save_mode = GET_MODE (args[i].save_area);
                rtx stack_area
                  = gen_rtx_MEM (save_mode,
                                 memory_address (save_mode,
                                                 XEXP (args[i].stack_slot, 0)));

                if (save_mode != BLKmode)
                  emit_move_insn (stack_area, args[i].save_area);
                else
                  emit_block_move (stack_area, args[i].save_area,
                                   GEN_INT (args[i].locate.size.constant),
                                   BLOCK_OP_CALL_PARM);
              }

          highest_outgoing_arg_in_use = initial_highest_arg_in_use;
          stack_usage_map = initial_stack_usage_map;
        }

      /* If this was alloca, record the new stack level for nonlocal gotos.
         Check for the handler slots since we might not have a save area
         for nonlocal gotos.  */

      if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
        update_nonlocal_goto_save_area ();

      /* Free up storage we no longer need.  */
      for (i = 0; i < num_actuals; ++i)
        if (args[i].aligned_regs)
          free (args[i].aligned_regs);

      insns = get_insns ();
      end_sequence ();

      if (pass == 0)
        {
          tail_call_insns = insns;

          /* Restore the pending stack adjustment now that we have
             finished generating the sibling call sequence.  */

          pending_stack_adjust = save_pending_stack_adjust;
          stack_pointer_delta = save_stack_pointer_delta;

          /* Prepare arg structure for next iteration.  */
          for (i = 0; i < num_actuals; i++)
            {
              args[i].value = 0;
              args[i].aligned_regs = 0;
              args[i].stack = 0;
            }

          sbitmap_free (stored_args_map);
        }
      else
        {
          normal_call_insns = insns;

          /* Verify that we've deallocated all the stack we used.  */
          gcc_assert ((flags & ECF_NORETURN)
                      || (old_stack_allocated
                          == stack_pointer_delta - pending_stack_adjust));
        }

      /* If something prevents making this a sibling call,
         zero out the sequence.  */
      if (sibcall_failure)
        tail_call_insns = NULL_RTX;
      else
        break;
    }

  /* If tail call production succeeded, we need to remove REG_EQUIV notes
     on the arguments too, as the argument area is now clobbered by the
     call.  */
  if (tail_call_insns)
    {
      emit_insn (tail_call_insns);
      cfun->tail_call_emit = true;
    }
  else
    emit_insn (normal_call_insns);

  currently_expanding_call--;

  /* If this function returns with the stack pointer depressed, ensure
     this block saves and restores the stack pointer, show it was
     changed, and adjust for any outgoing arg space.  */
  if (flags & ECF_SP_DEPRESSED)
    {
      clear_pending_stack_adjust ();
      emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
      emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
    }

  if (stack_usage_map_buf)
    free (stack_usage_map_buf);

  return target;
}
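
/* Example caller (an illustrative sketch; the surrounding code is an
   assumption, not part of this file): expr.c expands a CALL_EXPR roughly
   as

     temp = expand_call (exp, target, ignore);

   where TARGET may be 0 to let expand_call choose where the value goes,
   and IGNORE is nonzero when the caller does not use the value.  */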

/* A sibling call sequence invalidates any REG_EQUIV notes made for
   this function's incoming arguments.

   At the start of RTL generation we know the only REG_EQUIV notes
   in the rtl chain are those for incoming arguments, so we can look
   for REG_EQUIV notes between the start of the function and the
   NOTE_INSN_FUNCTION_BEG.

   This is (slight) overkill.  We could keep track of the highest
   argument we clobber and be more selective in removing notes, but it
   does not seem to be worth the effort.  */

void
fixup_tail_calls (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* There are never REG_EQUIV notes for the incoming arguments
         after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it.  */
      if (NOTE_P (insn)
          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
        break;

      while (1)
        {
          rtx note = find_reg_note (insn, REG_EQUIV, 0);
          if (note)
            {
              /* Remove the note and keep looking at the notes for
                 this insn.  */
              remove_note (insn, note);
              continue;
            }
          break;
        }
    }
}

/* Traverse an argument list in VALUES and expand all complex
   arguments into their components.  */
static tree
split_complex_values (tree values)
{
  tree p;

  /* Before allocating memory, check for the common case of no complex.  */
  for (p = values; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      if (type && TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        goto found;
    }
  return values;

 found:
  values = copy_list (values);

  for (p = values; p; p = TREE_CHAIN (p))
    {
      tree complex_value = TREE_VALUE (p);
      tree complex_type;

      complex_type = TREE_TYPE (complex_value);
      if (!complex_type)
        continue;

      if (TREE_CODE (complex_type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (complex_type))
        {
          tree subtype;
          tree real, imag, next;

          subtype = TREE_TYPE (complex_type);
          complex_value = save_expr (complex_value);
          real = build1 (REALPART_EXPR, subtype, complex_value);
          imag = build1 (IMAGPART_EXPR, subtype, complex_value);

          TREE_VALUE (p) = real;
          next = TREE_CHAIN (p);
          imag = build_tree_list (NULL_TREE, imag);
          TREE_CHAIN (p) = imag;
          TREE_CHAIN (imag) = next;

          /* Skip the newly created node.  */
          p = TREE_CHAIN (p);
        }
    }

  return values;
}
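
/* Illustrative example (not part of the original source): for a call
   `f (x, n)' where X has type _Complex double and the target splits
   complex arguments, the value list

     (x, n)

   becomes

     (REALPART_EXPR <SAVE_EXPR <x>>, IMAGPART_EXPR <SAVE_EXPR <x>>, n)

   so the two components are passed as if they were two separate double
   arguments; the SAVE_EXPR ensures X is evaluated only once.  */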

/* Traverse a list of TYPES and expand all complex types into their
   components.  */
static tree
split_complex_types (tree types)
{
  tree p;

  /* Before allocating memory, check for the common case of no complex.  */
  for (p = types; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_VALUE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        goto found;
    }
  return types;

 found:
  types = copy_list (types);

  for (p = types; p; p = TREE_CHAIN (p))
    {
      tree complex_type = TREE_VALUE (p);

      if (TREE_CODE (complex_type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (complex_type))
        {
          tree next, imag;

          /* Rewrite complex type with component type.  */
          TREE_VALUE (p) = TREE_TYPE (complex_type);
          next = TREE_CHAIN (p);

          /* Add another component type for the imaginary part.  */
          imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
          TREE_CHAIN (p) = imag;
          TREE_CHAIN (imag) = next;

          /* Skip the newly created node.  */
          p = TREE_CHAIN (p);
        }
    }

  return types;
}
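
/* Illustrative example (not part of the original source): the type list
   for `void f (_Complex float, int)'

     (complex float, int)

   is rewritten to

     (float, float, int)

   mirroring the value-list transformation in split_complex_values
   above.  */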

/* Output a library call to function FUN (a SYMBOL_REF rtx).
   The RETVAL parameter specifies whether the return value needs to be
   saved; the other parameters are documented with the emit_library_call
   function below.  */

static rtx
emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
                           enum libcall_type fn_type,
                           enum machine_mode outmode, int nargs, va_list p)
{
  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  struct args_size original_args_size;
  int argnum;
  rtx fun;
  int inc;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg
  {
    rtx value;
    enum machine_mode mode;
    rtx reg;
    int partial;
    struct locate_and_pad_arg_data locate;
    rtx save_area;
  };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  rtx call_fusage = 0;
  rtx mem_value = 0;
  rtx valreg;
  int pcc_struct_value = 0;
  int struct_value_size = 0;
  int flags;
  int reg_parm_stack_space = 0;
  int needed;
  rtx before_call;
  tree tfom;                    /* type_for_mode (outmode, 0) */

#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save, high_to_save;
  rtx save_area = 0;            /* Place that it is saved.  */
#endif

  /* Size of the stack reserved for parameter registers.  */
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  char *stack_usage_map_buf = NULL;

  rtx struct_value = targetm.calls.struct_value_rtx (0, 0);

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
#endif

  /* By default, library functions cannot throw.  */
  flags = ECF_NOTHROW;

  switch (fn_type)
    {
    case LCT_NORMAL:
      break;
    case LCT_CONST:
      flags |= ECF_CONST;
      break;
    case LCT_PURE:
      flags |= ECF_PURE;
      break;
    case LCT_CONST_MAKE_BLOCK:
      flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
      break;
    case LCT_PURE_MAKE_BLOCK:
      flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
      break;
    case LCT_NORETURN:
      flags |= ECF_NORETURN;
      break;
    case LCT_THROW:
      flags = ECF_NORETURN;
      break;
    case LCT_RETURNS_TWICE:
      flags = ECF_RETURNS_TWICE;
      break;
    }
  fun = orgfun;

  /* Ensure current function's preferred stack boundary is at least
     what we need.  */
  if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* If this kind of value comes back in memory,
     decide where in memory it should come back.  */
  if (outmode != VOIDmode)
    {
      tfom = lang_hooks.types.type_for_mode (outmode, 0);
      if (aggregate_value_p (tfom, 0))
        {
#ifdef PCC_STATIC_STRUCT_RETURN
          rtx pointer_reg
            = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
          mem_value = gen_rtx_MEM (outmode, pointer_reg);
          pcc_struct_value = 1;
          if (value == 0)
            value = gen_reg_rtx (outmode);
#else /* not PCC_STATIC_STRUCT_RETURN */
          struct_value_size = GET_MODE_SIZE (outmode);
          if (value != 0 && MEM_P (value))
            mem_value = value;
          else
            mem_value = assign_temp (tfom, 0, 1, 1);
#endif
          /* This call returns a big structure.  */
          flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
        }
    }
  else
    tfom = void_type_node;

  /* ??? Unfinished: must pass the memory address as an argument.  */

  /* Copy all the libcall arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = alloca ((nargs + 1) * sizeof (struct arg));
  memset (argvec, 0, (nargs + 1) * sizeof (struct arg));

#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
  INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
#else
  INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
#endif

  args_size.constant = 0;
  args_size.var = 0;

  count = 0;

  /* Now we are about to start emitting insns that can be deleted
     if a libcall is deleted.  */
  if (flags & ECF_LIBCALL_BLOCK)
    start_sequence ();

  push_temp_slots ();

  /* If there's a structure value address to be passed,
     either pass it in the special place, or pass it as an extra argument.  */
  if (mem_value && struct_value == 0 && ! pcc_struct_value)
    {
      rtx addr = XEXP (mem_value, 0);

      nargs++;

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (addr) && !MEM_P (addr)
          && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
        addr = force_operand (addr, NULL_RTX);

      argvec[count].value = addr;
      argvec[count].mode = Pmode;
      argvec[count].partial = 0;

      argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
      gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
                                                   NULL_TREE, 1) == 0);

      locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                           1,
#else
                           argvec[count].reg != 0,
#endif
                           0, NULL_TREE, &args_size, &argvec[count].locate);

      if (argvec[count].reg == 0 || argvec[count].partial != 0
          || reg_parm_stack_space > 0)
        args_size.constant += argvec[count].locate.size.constant;

      FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);

      count++;
    }

  for (; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      /* We cannot convert the arg value to the mode the library wants here;
         must do it earlier where we know the signedness of the arg.  */
      gcc_assert (mode != BLKmode
                  && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (val) && !MEM_P (val)
          && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
        val = force_operand (val, NULL_RTX);

      if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
        {
          rtx slot;
          int must_copy
            = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);

          /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
             functions, so we have to pretend this isn't such a function.  */
          if (flags & ECF_LIBCALL_BLOCK)
            {
              rtx insns = get_insns ();
              end_sequence ();
              emit_insn (insns);
            }
          flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

          /* If this was a CONST function, it is now PURE since
             it now reads memory.  */
          if (flags & ECF_CONST)
            {
              flags &= ~ECF_CONST;
              flags |= ECF_PURE;
            }

          if (MEM_P (val) && !must_copy)
            slot = val;
          else
            {
              slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
                                  0, 1, 1);
              emit_move_insn (slot, val);
            }

          call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                           gen_rtx_USE (VOIDmode, slot),
                                           call_fusage);
          if (must_copy)
            call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                             gen_rtx_CLOBBER (VOIDmode,
                                                              slot),
                                             call_fusage);

          mode = Pmode;
          val = force_operand (XEXP (slot, 0), NULL_RTX);
        }

      argvec[count].value = val;
      argvec[count].mode = mode;

      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);

      argvec[count].partial
        = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);

      locate_and_pad_parm (mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                           1,
#else
                           argvec[count].reg != 0,
#endif
                           argvec[count].partial,
                           NULL_TREE, &args_size, &argvec[count].locate);

      gcc_assert (!argvec[count].locate.size.var);

      if (argvec[count].reg == 0 || argvec[count].partial != 0
          || reg_parm_stack_space > 0)
        args_size.constant += argvec[count].locate.size.constant;

      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
    }

  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

  original_args_size = args_size;
  args_size.constant = (((args_size.constant
                          + stack_pointer_delta
                          + STACK_BYTES - 1)
                          / STACK_BYTES
                          * STACK_BYTES)
                         - stack_pointer_delta);
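
  /* Worked example (illustrative numbers): with STACK_BYTES == 16,
     args_size.constant == 20 and stack_pointer_delta == 8, the sum 28 is
     rounded up to 32 and the delta subtracted again, giving 24, so the
     stack pointer lands on a 16-byte boundary after the adjustment.  */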

  args_size.constant = MAX (args_size.constant,
                            reg_parm_stack_space);

#ifndef OUTGOING_REG_PARM_STACK_SPACE
  args_size.constant -= reg_parm_stack_space;
#endif

  if (args_size.constant > current_function_outgoing_args_size)
    current_function_outgoing_args_size = args_size.constant;

  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* Since the stack pointer will never be pushed, it is possible for
         the evaluation of a parm to clobber something we have already
         written to the stack.  Since most function calls on RISC machines
         do not use the stack, this is uncommon, but must work correctly.

         Therefore, we save any area of the stack that was already written
         and that we are using.  Here we set up to do this by making a new
         stack usage map from the old one.

         Another approach might be to try to reorder the argument
         evaluations to avoid this conflicting stack usage.  */

      needed = args_size.constant;

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      /* Since we will be writing into the entire argument area, the
         map must be allocated for its entire size, not just the part that
         is the responsibility of the caller.  */
      needed += reg_parm_stack_space;
#endif

#ifdef ARGS_GROW_DOWNWARD
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                         needed + 1);
#else
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                         needed);
#endif
      stack_usage_map_buf = xmalloc (highest_outgoing_arg_in_use);
      stack_usage_map = stack_usage_map_buf;

      if (initial_highest_arg_in_use)
        memcpy (stack_usage_map, initial_stack_usage_map,
                initial_highest_arg_in_use);

      if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
        memset (&stack_usage_map[initial_highest_arg_in_use], 0,
                highest_outgoing_arg_in_use - initial_highest_arg_in_use);
      needed = 0;

      /* We must be careful to use virtual regs before they're instantiated,
         and real regs afterwards.  Loop optimization, for example, can create
         new libcalls after we've instantiated the virtual regs, and if we
         use virtuals anyway, they won't match the rtl patterns.  */

      if (virtuals_instantiated)
        argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
      else
        argblock = virtual_outgoing_args_rtx;
    }
  else
    {
      if (!PUSH_ARGS)
        argblock = push_block (GEN_INT (args_size.constant), 0, 0);
    }

  /* If we push args individually in reverse order, perform stack alignment
     before the first push (the last arg).  */
  if (argblock == 0 && PUSH_ARGS_REVERSED)
    anti_adjust_stack (GEN_INT (args_size.constant
                                - original_args_size.constant));

  if (PUSH_ARGS_REVERSED)
    {
      inc = -1;
      argnum = nargs - 1;
    }
  else
    {
      inc = 1;
      argnum = 0;
    }

#ifdef REG_PARM_STACK_SPACE
  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* The argument list is the property of the called routine and it
         may clobber it.  If the fixed area has been used for previous
         parameters, we must save and restore it.  */
      save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
                                            &low_to_save, &high_to_save);
    }
#endif

  /* Push the args that need to be pushed.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      enum machine_mode mode = argvec[argnum].mode;
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;
      int lower_bound = 0, upper_bound = 0, i;

      if (! (reg != 0 && partial == 0))
        {
          if (ACCUMULATE_OUTGOING_ARGS)
            {
              /* If this is being stored into a pre-allocated, fixed-size,
                 stack area, save any previous data at that location.  */

#ifdef ARGS_GROW_DOWNWARD
              /* stack_slot is negative, but we want to index stack_usage_map
                 with positive values.  */
              upper_bound = -argvec[argnum].locate.offset.constant + 1;
              lower_bound = upper_bound - argvec[argnum].locate.size.constant;
#else
              lower_bound = argvec[argnum].locate.offset.constant;
              upper_bound = lower_bound + argvec[argnum].locate.size.constant;
#endif

              i = lower_bound;
              /* Don't worry about things in the fixed argument area;
                 it has already been saved.  */
              if (i < reg_parm_stack_space)
                i = reg_parm_stack_space;
              while (i < upper_bound && stack_usage_map[i] == 0)
                i++;

              if (i < upper_bound)
                {
                  /* We need to make a save area.  */
                  unsigned int size
                    = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
                  enum machine_mode save_mode
                    = mode_for_size (size, MODE_INT, 1);
                  rtx adr
                    = plus_constant (argblock,
                                     argvec[argnum].locate.offset.constant);
                  rtx stack_area
                    = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));

                  if (save_mode == BLKmode)
                    {
                      argvec[argnum].save_area
                        = assign_stack_temp (BLKmode,
                                             argvec[argnum].locate.size.constant,
                                             0);

                      emit_block_move (validize_mem (argvec[argnum].save_area),
                                       stack_area,
                                       GEN_INT (argvec[argnum].locate.size.constant),
                                       BLOCK_OP_CALL_PARM);
                    }
                  else
                    {
                      argvec[argnum].save_area = gen_reg_rtx (save_mode);

                      emit_move_insn (argvec[argnum].save_area, stack_area);
                    }
                }
            }

          emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
                          partial, reg, 0, argblock,
                          GEN_INT (argvec[argnum].locate.offset.constant),
                          reg_parm_stack_space,
                          ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));

          /* Now mark the segment we just used.  */
          if (ACCUMULATE_OUTGOING_ARGS)
            for (i = lower_bound; i < upper_bound; i++)
              stack_usage_map[i] = 1;

          NO_DEFER_POP;

          if (flags & ECF_CONST)
            {
              rtx use;

              /* Indicate argument access so that alias.c knows that these
                 values are live.  */
              if (argblock)
                use = plus_constant (argblock,
                                     argvec[argnum].locate.offset.constant);
              else
                /* When arguments are pushed, trying to tell alias.c where
                   exactly this argument is won't work, because the
                   auto-increment causes confusion.  So we merely indicate
                   that we access something with a known mode somewhere on
                   the stack.  */
                use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                                    gen_rtx_SCRATCH (Pmode));
              use = gen_rtx_MEM (argvec[argnum].mode, use);
              use = gen_rtx_USE (VOIDmode, use);
              call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
            }
        }
    }

  /* If we pushed args in forward order, perform stack alignment
     after pushing the last arg.  */
  if (argblock == 0 && !PUSH_ARGS_REVERSED)
    anti_adjust_stack (GEN_INT (args_size.constant
                                - original_args_size.constant));

  if (PUSH_ARGS_REVERSED)
    argnum = nargs - 1;
  else
    argnum = 0;

  fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);

  /* Now load any reg parms into their regs.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      enum machine_mode mode = argvec[argnum].mode;
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      /* Handle calls that pass values in multiple non-contiguous
         locations.  The PA64 has examples of this for library calls.  */
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
      else if (reg != 0 && partial == 0)
        emit_move_insn (reg, val);

      NO_DEFER_POP;
    }

  /* Any regs containing parms remain in use through the call.  */
  for (count = 0; count < nargs; count++)
    {
      rtx reg = argvec[count].reg;
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        use_group_regs (&call_fusage, reg);
      else if (reg != 0)
        use_reg (&call_fusage, reg);
    }

  /* Pass the function the address in which to return a structure value.  */
  if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
    {
      emit_move_insn (struct_value,
                      force_reg (Pmode,
                                 force_operand (XEXP (mem_value, 0),
                                                NULL_RTX)));
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
    }

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;
  valreg = (mem_value == 0 && outmode != VOIDmode
            ? hard_libcall_value (outmode) : NULL_RTX);

  /* Stack must be properly aligned now.  */
  gcc_assert (!(stack_pointer_delta
                & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));

  before_call = get_last_insn ();

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */
  /* The return type is needed to decide how many bytes the function pops.
     Signedness plays no role in that, so for simplicity, we pretend it's
     always signed.  We also assume that the list of arguments passed has
     no impact, so we pretend it is unknown.  */

  emit_call_1 (fun, NULL,
               get_identifier (XSTR (orgfun, 0)),
               build_function_type (tfom, NULL_TREE),
               original_args_size.constant, args_size.constant,
               struct_value_size,
               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
               valreg,
               old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);

  /* For calls to `setjmp', etc., inform flow.c it should complain
     if nonvolatile values are live.  For functions that cannot return,
     inform flow that control does not fall through.  */

  if (flags & ECF_NORETURN)
    {
      /* The barrier note must be emitted
         immediately after the CALL_INSN.  Some ports emit more than
         just a CALL_INSN above, so we must search for it here.  */

      rtx last = get_last_insn ();
      while (!CALL_P (last))
        {
          last = PREV_INSN (last);
          /* There was no CALL_INSN?  */
          gcc_assert (last != before_call);
        }

      emit_barrier_after (last);
    }

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;

  /* If call is cse'able, make appropriate pair of reg-notes around it.
     Test valreg so we don't crash; may safely ignore `const'
     if return type is void.  Disable for PARALLEL return values, because
     we have no way to move such values into a pseudo register.  */
  if (flags & ECF_LIBCALL_BLOCK)
    {
      rtx insns;

      if (valreg == 0)
        {
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
        }
      else
        {
          rtx note = 0;
          rtx temp;
          int i;

          if (GET_CODE (valreg) == PARALLEL)
            {
              temp = gen_reg_rtx (outmode);
              emit_group_store (temp, valreg, NULL_TREE,
                                GET_MODE_SIZE (outmode));
              valreg = temp;
            }

          temp = gen_reg_rtx (GET_MODE (valreg));

          /* Construct an "equal form" for the value which mentions all the
             arguments in order as well as the function name.  */
          for (i = 0; i < nargs; i++)
            note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
          note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);

          insns = get_insns ();
          end_sequence ();

          if (flags & ECF_PURE)
            note = gen_rtx_EXPR_LIST (VOIDmode,
                        gen_rtx_USE (VOIDmode,
                                     gen_rtx_MEM (BLKmode,
                                                  gen_rtx_SCRATCH (VOIDmode))),
                        note);

          emit_libcall_block (insns, temp, valreg, note);

          valreg = temp;
        }
    }
  pop_temp_slots ();

  /* Copy the value to the right place.  */
  if (outmode != VOIDmode && retval)
    {
      if (mem_value)
        {
          if (value == 0)
            value = mem_value;
          if (value != mem_value)
            emit_move_insn (value, mem_value);
        }
      else if (GET_CODE (valreg) == PARALLEL)
        {
          if (value == 0)
            value = gen_reg_rtx (outmode);
          emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
        }
      else if (value != 0)
        emit_move_insn (value, valreg);
      else
        value = valreg;
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
        restore_fixed_argument_area (save_area, argblock,
                                     high_to_save, low_to_save);
#endif

      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
        if (argvec[count].save_area)
          {
            enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
            rtx adr = plus_constant (argblock,
                                     argvec[count].locate.offset.constant);
            rtx stack_area = gen_rtx_MEM (save_mode,
                                          memory_address (save_mode, adr));

            if (save_mode == BLKmode)
              emit_block_move (stack_area,
                               validize_mem (argvec[count].save_area),
                               GEN_INT (argvec[count].locate.size.constant),
                               BLOCK_OP_CALL_PARM);
            else
              emit_move_insn (stack_area, argvec[count].save_area);
          }

      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  if (stack_usage_map_buf)
    free (stack_usage_map_buf);

  return value;
}

/* Output a library call to function FUN (a SYMBOL_REF rtx)
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.

   FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
   calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
   which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
   LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
   REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
   or another LCT_ value for other types of library calls.  */

void
emit_library_call (rtx orgfun, enum libcall_type fn_type,
                   enum machine_mode outmode, int nargs, ...)
{
  va_list p;

  va_start (p, nargs);
  emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
  va_end (p);
}
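
/* Example usage (an illustrative sketch; the operands ADDR and SIZE are
   assumptions, not taken from this file):

     emit_library_call (memset_libfunc, LCT_NORMAL, VOIDmode, 3,
                        addr, Pmode,
                        const0_rtx, TYPE_MODE (integer_type_node),
                        size, TYPE_MODE (sizetype));

   This passes three arguments, ADDR in Pmode and the remaining two in
   their stated modes, and emits a call to the memset library function
   with no return value requested.  */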

/* Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  */

rtx
emit_library_call_value (rtx orgfun, rtx value,
                         enum libcall_type fn_type,
                         enum machine_mode outmode, int nargs, ...)
{
  rtx result;
  va_list p;

  va_start (p, nargs);
  result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
                                      nargs, p);
  va_end (p);

  return result;
}
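
/* Example usage (an illustrative sketch; LIBFUNC, OP0 and OP1 are
   hypothetical):

     rtx res = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
                                        SImode, 2,
                                        op0, SImode, op1, SImode);

   Passing NULL_RTX for VALUE lets the function pick where the SImode
   result lives; RES is then the rtx holding it.  */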
3975
 
3976
/* Store a single argument for a function call
3977
   into the register or memory area where it must be passed.
3978
   *ARG describes the argument value and where to pass it.
3979
 
3980
   ARGBLOCK is the address of the stack-block for all the arguments,
3981
   or 0 on a machine where arguments are pushed individually.
3982
 
3983
   MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3984
   so must be careful about how the stack is used.
3985
 
3986
   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3987
   argument stack.  This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3988
   that we need not worry about saving and restoring the stack.
3989
 
3990
   FNDECL is the declaration of the function we are calling.
3991
 
3992
   Return nonzero if this arg should cause sibcall failure,
3993
   zero otherwise.  */
3994
 
static int
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
               int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack area,
         save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
        {
#ifdef ARGS_GROW_DOWNWARD
          /* stack_slot is negative, but we want to index stack_usage_map
             with positive values.  */
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
          else
            upper_bound = 0;

          lower_bound = upper_bound - arg->locate.size.constant;
#else
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
          else
            lower_bound = 0;

          upper_bound = lower_bound + arg->locate.size.constant;
#endif

          i = lower_bound;
          /* Don't worry about things in the fixed argument area;
             it has already been saved.  */
          if (i < reg_parm_stack_space)
            i = reg_parm_stack_space;
          while (i < upper_bound && stack_usage_map[i] == 0)
            i++;

          if (i < upper_bound)
            {
              /* We need to make a save area.  */
              unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
              enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
              rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
              rtx stack_area = gen_rtx_MEM (save_mode, adr);

              if (save_mode == BLKmode)
                {
                  tree ot = TREE_TYPE (arg->tree_value);
                  tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
                                                       | TYPE_QUAL_CONST));

                  arg->save_area = assign_temp (nt, 0, 1, 1);
                  preserve_temp_slots (arg->save_area);
                  emit_block_move (validize_mem (arg->save_area), stack_area,
                                   expr_size (arg->tree_value),
                                   BLOCK_OP_CALL_PARM);
                }
              else
                {
                  arg->save_area = gen_reg_rtx (save_mode);
                  emit_move_insn (arg->save_area, stack_area);
                }
            }
        }
    }
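
/* Illustrative sketch, not GCC code: the bookkeeping above computes a
   half-open byte interval [lower_bound, upper_bound) for the slot and
   scans stack_usage_map for the first byte already in use.  A
   self-contained model of that scan, with hypothetical names:  */
#if 0
/* Return the first in-use index of MAP within [lower, upper), skipping
   the already-saved fixed area of FIXED bytes; return UPPER when every
   byte is free, i.e. when no save area is needed.  */
static int
first_clobbered_byte (const char *map, int lower, int upper, int fixed)
{
  int i = lower;
  if (i < fixed)
    i = fixed;
  while (i < upper && map[i] == 0)
    i++;
  return i;
}
#endif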

  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
        reg = arg->tail_call_reg;
      else
        reg = arg->reg;
      partial = arg->partial;
    }

  /* An argument passed entirely in a register should never reach this
     function; assert that any register use is only partial.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
         being evaluated directly into the outgoing argument list and
         expand_call must take special action to preserve the argument list
         if it is called recursively.

         For scalar function arguments stack_usage_map is sufficient to
         determine which stack slots must be saved and restored.  Scalar
         arguments in general have pass_on_stack == 0.

         If this argument is initialized by a function which takes the
         address of the argument (a C++ constructor or a C function
         returning a BLKmode structure), then stack_usage_map is
         insufficient and expand_call must push the stack around the
         function call.  Such arguments have pass_on_stack == 1.

         Note that it is always safe to set stack_arg_under_construction,
         but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
        stack_arg_under_construction++;

      arg->value = expand_expr (pval,
                                (partial
                                 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
                                ? NULL_RTX : arg->stack,
                                VOIDmode, EXPAND_STACK_PARM);

      /* If the mode doesn't agree (because we are promoting the object,
         or for any other reason), convert it now.  */
      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
        arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
                                    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
        stack_arg_under_construction--;
    }

  /* Check for overlap with the already-clobbered argument area.  */
  if ((flags & ECF_SIBCALL)
      && MEM_P (arg->value)
      && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
                                               arg->locate.size.constant))
    sibcall_failure = 1;

  /* Don't allow anything left on the stack from the computation of an
     argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;

      /* Argument is a scalar, not entirely passed in registers.
         (If part is passed in registers, arg->partial says how much
         and emit_push_insn will take care of putting it there.)

         Push it, and if its size is less than the amount of space
         allocated to it, also bump the stack pointer by the additional
         space.  Note that in C the default argument promotions will
         prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
         On many machines, pushing a byte will advance the stack
         pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
         round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
        used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
                 / (PARM_BOUNDARY / BITS_PER_UNIT))
                * (PARM_BOUNDARY / BITS_PER_UNIT));
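
      /* Worked example of the round-up above: assuming target values
         PARM_BOUNDARY == 64 and BITS_PER_UNIT == 8 (both are
         target-defined), the alignment unit is 8 bytes, so a 5-byte
         push gets

             used = ((5 + 8 - 1) / 8) * 8 == 8

         bytes of space: the classic round-up-to-a-multiple idiom.  */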

      /* This isn't already where we want it on the stack, so put it there.
         This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
                      PARM_BOUNDARY, partial, reg, used - size, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.  */
      if (partial == 0)
        arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
         If part is passed in registers, PARTIAL says how much
         and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
         of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
        {
          excess = 0;
          size_rtx = ARGS_SIZE_RTX (arg->locate.size);
        }
      else
        {
          /* PUSH_ROUNDING has no effect on us, because emit_push_insn
             for BLKmode is careful to avoid it.  */
          excess = (arg->locate.size.constant
                    - int_size_in_bytes (TREE_TYPE (pval))
                    + partial);
          size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
                                  NULL_RTX, TYPE_MODE (sizetype), 0);
        }

      parm_align = arg->locate.boundary;

      /* When an argument is padded down, the block is aligned to
         PARM_BOUNDARY, but the actual argument isn't.  */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
        {
          if (arg->locate.size.var)
            parm_align = BITS_PER_UNIT;
          else if (excess)
            {
              unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, excess_align);
            }
        }

      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
        {
          /* emit_push_insn might not work properly if arg->value and
             argblock + arg->locate.offset areas overlap.  */
          rtx x = arg->value;
          int i = 0;

          if (XEXP (x, 0) == current_function_internal_arg_pointer
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) ==
                     current_function_internal_arg_pointer
                  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
            {
              if (XEXP (x, 0) != current_function_internal_arg_pointer)
                i = INTVAL (XEXP (XEXP (x, 0), 1));

              /* expand_call should ensure this.  */
              gcc_assert (!arg->locate.offset.var
                          && GET_CODE (size_rtx) == CONST_INT);

              if (arg->locate.offset.constant > i)
                {
                  if (arg->locate.offset.constant < i + INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
              else if (arg->locate.offset.constant < i)
                {
                  if (i < arg->locate.offset.constant + INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
            }
        }
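
#if 0
      /* Illustrative sketch, not GCC code: the two tests above are a
         half-open interval overlap check that only fires when the
         source and destination start at different offsets.  An
         equivalent formulation, with hypothetical local names:  */
      {
        int src_off = i;                          /* incoming arg bytes */
        int dst_off = arg->locate.offset.constant;
        int size = INTVAL (size_rtx);
        if (dst_off != src_off
            && dst_off < src_off + size
            && src_off < dst_off + size)
          sibcall_failure = 1;
      }
#endif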

      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
                      parm_align, partial, reg, excess, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.

         ??? Unlike the case above, in which we want the actual
         address of the data, so that we can load it directly into a
         register, here we want the address of the stack slot, so that
         it's properly aligned for word-by-word copying or something
         like that.  It's not clear that this is always correct.  */
      if (partial == 0)
        arg->value = arg->stack_slot;
    }

  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
        = emit_group_load_into_temps (arg->reg, arg->value, type,
                                      int_size_in_bytes (type));
    }

  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();

  return sibcall_failure;
}
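
/* Illustrative sketch, not GCC code: expand_call invokes store_one_arg
   once per outgoing argument and folds the results into its own
   sibcall-failure flag, roughly along these lines (simplified, other
   checks elided):  */
#if 0
  for (i = 0; i < num_actuals; i++)
    if (store_one_arg (&args[i], argblock, flags,
                       adjusted_args_size.var != 0,
                       reg_parm_stack_space))
      sibcall_failure = 1;
#endif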

/* Nonzero if we do not know how to pass TYPE solely in registers.  */

bool
must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
                             tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  return false;
}
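
/* Illustrative sketch: a back end selects one of these predicates as
   its TARGET_MUST_PASS_IN_STACK hook when it builds its target vector,
   typically in its machine-specific source file (an assumption about
   target code outside this file):  */
#if 0
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size

struct gcc_target targetm = TARGET_INITIALIZER;
#endif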

/* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   takes trailing padding of a structure into account.  */
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */

bool
must_pass_in_stack_var_size_or_pad (enum machine_mode mode, tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  /* If the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register...  */
  if (mode == BLKmode
      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (FUNCTION_ARG_PADDING (mode, type)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;

  return false;
}
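
/* Worked example of the padding test above: assume a big-endian target
   with PARM_BOUNDARY / BITS_PER_UNIT == 4 that pads such arguments
   upward (all target-defined values; these numbers are only an
   example).  A 3-byte BLKmode structure has 3 % 4 != 0, so it carries
   trailing padding, and copying it into a register would leave its
   bytes at the wrong end of the word; the predicate therefore forces
   it onto the stack.  */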
