/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011, 2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "flags.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "function.h"
#include "regs.h"
#include "diagnostic-core.h"
#include "output.h"
#include "tm_p.h"
#include "timevar.h"
#include "sbitmap.h"
#include "langhooks.h"
#include "target.h"
#include "cgraph.h"
#include "except.h"
#include "dbgcnt.h"
#include "tree-flow.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
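
/* For example, on a target where PREFERRED_STACK_BOUNDARY is 64 and
   BITS_PER_UNIT is 8, STACK_BYTES evaluates to 8, so argument block
   sizes are rounded up to multiples of 8 bytes.  (Illustrative values
   only; both macros are target-defined.)  */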

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
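
/* Illustrative example (hypothetical target): for a 16-byte argument
   when only 8 bytes of argument registers remain, the back end might
   set REG to the first free argument register and PARTIAL to 8, so
   the first 8 bytes travel in registers while the remainder goes in
   the stack slot described by LOCATE.  */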

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its caller's incoming
   argument slots when they have already been overwritten with tail
   call arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                         HOST_WIDE_INT, rtx, rtx, int, rtx, int,
                         cumulative_args_t);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
static void initialize_argument_information (int, struct arg_data *,
                                             struct args_size *, int,
                                             tree, tree,
                                             tree, tree, cumulative_args_t, int,
                                             rtx *, int *, int *, int *,
                                             bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
                                      enum machine_mode, int, va_list);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
                                                      unsigned int);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl, rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((reg_parm_seen
               && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
              ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
              : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
        funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      rtx chain;

      gcc_assert (fndecl);
      chain = targetm.calls.static_chain (fndecl, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
        use_reg (call_fusage, chain);
    }

  return funexp;
}
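
/* Sketch of a typical invocation (illustrative only; expand_call,
   later in this file, is the real caller):

     funexp = prepare_call_address (fndecl, funexp, static_chain_value,
                                    &call_fusage, reg_parm_seen, sibcallp);

   where FUNEXP starts out as the expanded callee address and
   CALL_FUSAGE accumulates the USEs that emit_call_1 later attaches to
   the call insn.  */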

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
             HOST_WIDE_INT rounded_stack_size,
             HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn, call, funmem;
  int already_popped = 0;
  HOST_WIDE_INT n_popped
    = targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
         counterpart compare equal and get a shared mem_attrs, they
         produce different dump output in compare-debug compilations
         if an entry gets garbage collected in one compilation and a
         different (but equivalent) entry is then added, while the
         other compilation doesn't run the garbage collector at the
         same spot and so shares the mem_attr with the equivalent
         entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        {
          tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
          if (t2)
            t = t2;
        }

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_SIBCALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
                                     next_arg_reg, n_pop);
      else
        pat = GEN_SIBCALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
                               n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0)
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_CALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
                                  next_arg_reg, n_pop);
      else
        pat = GEN_CALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
                            n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
        emit_call_insn (GEN_SIBCALL_VALUE (valreg, funmem,
                                           rounded_stack_size_rtx,
                                           next_arg_reg, NULL_RTX));
      else
        emit_call_insn (GEN_SIBCALL (funmem, rounded_stack_size_rtx,
                                     next_arg_reg,
                                     GEN_INT (struct_value_size)));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
        emit_call_insn (GEN_CALL_VALUE (valreg, funmem, rounded_stack_size_rtx,
                                        next_arg_reg, NULL_RTX));
      else
        emit_call_insn (GEN_CALL (funmem, rounded_stack_size_rtx, next_arg_reg,
                                  GEN_INT (struct_value_size)));
    }
  else
#endif
    gcc_unreachable ();

  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = PATTERN (call_insn);
  if (GET_CODE (call) == PARALLEL)
    call = XVECEXP (call, 0, 0);
  if (GET_CODE (call) == SET)
    call = SET_SRC (call);
  if (GET_CODE (call) == CALL
      && MEM_P (XEXP (call, 0))
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's
     unchanging bit.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;

      add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
        crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (rounded_stack_size != 0)
        {
          if (ecf_flags & ECF_NORETURN)
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similarly to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}

/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set NORETURN if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  if (fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
         wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
          || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
            && name[0] == 'a'
            && ! strcmp (name, "alloca"))
           || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
               && name[0] == '_'
               && ! strcmp (name, "__builtin_alloca"))))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __, __x or __builtin_.  */
      if (name[0] == '_')
        {
          if (name[1] == '_'
              && name[2] == 'b'
              && !strncmp (name + 3, "uiltin_", 7))
            tname += 10;
          else if (name[1] == '_' && name[2] == 'x')
            tname += 3;
          else if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }
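
      /* For example, "__builtin_setjmp" leaves TNAME pointing at
         "setjmp", and "_setjmp" likewise at "setjmp", so both are
         matched by the checks below.  (Illustrative; any name with
         one of the four prefixes is stripped the same way.)  */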

      if (tname[0] == 's')
        {
          if ((tname[1] == 'e'
               && (! strcmp (tname, "setjmp")
                   || ! strcmp (tname, "setjmp_syscall")))
              || (tname[1] == 'i'
                  && ! strcmp (tname, "sigsetjmp"))
              || (tname[1] == 'a'
                  && ! strcmp (tname, "savectx")))
            flags |= ECF_RETURNS_TWICE;

          if (tname[1] == 'i'
              && ! strcmp (tname, "siglongjmp"))
            flags |= ECF_NORETURN;
        }
      else if ((tname[0] == 'q' && tname[1] == 's'
                && ! strcmp (tname, "qsetjmp"))
               || (tname[0] == 'v' && tname[1] == 'f'
                   && ! strcmp (tname, "vfork"))
               || (tname[0] == 'g' && tname[1] == 'e'
                   && !strcmp (tname, "getcontext")))
        flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
               && ! strcmp (tname, "longjmp"))
        flags |= ECF_NORETURN;
    }

  return flags;
}

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}


/* Return true if STMT is an alloca call.  */

bool
gimple_alloca_call_p (const_gimple stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true when EXP contains an alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
          & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}
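
/* For instance, the CALL_EXPR built for a source-level `alloca (n)'
   has an ADDR_EXPR of the alloca FUNCTION_DECL as its CALL_EXPR_FN,
   so the test above answers true for it.  */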

/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_TM_COMMIT:
        case BUILT_IN_TM_COMMIT_EH:
        case BUILT_IN_TM_ABORT:
        case BUILT_IN_TM_IRREVOCABLE:
        case BUILT_IN_TM_GETTMCLONE_IRR:
        case BUILT_IN_TM_MEMCPY:
        case BUILT_IN_TM_MEMMOVE:
        case BUILT_IN_TM_MEMSET:
        CASE_BUILT_IN_TM_STORE (1):
        CASE_BUILT_IN_TM_STORE (2):
        CASE_BUILT_IN_TM_STORE (4):
        CASE_BUILT_IN_TM_STORE (8):
        CASE_BUILT_IN_TM_STORE (FLOAT):
        CASE_BUILT_IN_TM_STORE (DOUBLE):
        CASE_BUILT_IN_TM_STORE (LDOUBLE):
        CASE_BUILT_IN_TM_STORE (M64):
        CASE_BUILT_IN_TM_STORE (M128):
        CASE_BUILT_IN_TM_STORE (M256):
        CASE_BUILT_IN_TM_LOAD (1):
        CASE_BUILT_IN_TM_LOAD (2):
        CASE_BUILT_IN_TM_LOAD (4):
        CASE_BUILT_IN_TM_LOAD (8):
        CASE_BUILT_IN_TM_LOAD (FLOAT):
        CASE_BUILT_IN_TM_LOAD (DOUBLE):
        CASE_BUILT_IN_TM_LOAD (LDOUBLE):
        CASE_BUILT_IN_TM_LOAD (M64):
        CASE_BUILT_IN_TM_LOAD (M128):
        CASE_BUILT_IN_TM_LOAD (M256):
        case BUILT_IN_TM_LOG:
        case BUILT_IN_TM_LOG_1:
        case BUILT_IN_TM_LOG_2:
        case BUILT_IN_TM_LOG_4:
        case BUILT_IN_TM_LOG_8:
        case BUILT_IN_TM_LOG_FLOAT:
        case BUILT_IN_TM_LOG_DOUBLE:
        case BUILT_IN_TM_LOG_LDOUBLE:
        case BUILT_IN_TM_LOG_M64:
        case BUILT_IN_TM_LOG_M128:
        case BUILT_IN_TM_LOG_M256:
          return true;
        default:
          break;
        }
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
        flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
        flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
        flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
        flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
        flags |= ECF_LEAF;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (flag_tm)
        {
          if (is_tm_builtin (exp))
            flags |= ECF_TM_BUILTIN;
          else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
                   || lookup_attribute ("transaction_pure",
                                        TYPE_ATTRIBUTES (TREE_TYPE (exp))))
            flags |= ECF_TM_PURE;
        }

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
        flags |= ECF_CONST;

      if (flag_tm
          && ((flags & ECF_CONST) != 0
              || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
        flags |= ECF_TM_PURE;
    }

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
        flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}
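
/* For example, a decl declared `__attribute__ ((const, noreturn))'
   has TREE_READONLY and TREE_THIS_VOLATILE set, so the result is
   ECF_CONST | ECF_NORETURN | ECF_LOOPING_CONST_OR_PURE: a const
   function that never returns must be treated as possibly looping
   forever, so calls to it are not deleted outright.  */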

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (CALL_EXPR_FN (t));
      if (t && TREE_CODE (t) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
        flags = 0;
    }

  return flags;
}

/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_normal (args[i].tree_value);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();
          }

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)
          {
            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));
          }

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameter registers.  */

        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && set_src_cost (args[i].value, optimize_insn_for_speed_p ())
                    > COSTS_N_INSNS (1)
                 && ((*reg_parm_seen
                      && targetm.small_register_classes_for_mode_p (args[i].mode))
                     || optimize))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
 | 
      

#ifdef REG_PARM_STACK_SPACE

  /* The argument list is the property of the called routine and it
     may clobber it.  If the fixed area has been used for previous
     parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
        int num_to_save;
        enum machine_mode save_mode;
        int delta;
        rtx stack_area;
        rtx save_area;

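        /* Trim HIGH down to the last slot that is actually in use.  */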
        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;
        save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        if ((low & (MIN (GET_MODE_SIZE (save_mode),
                         BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
          save_mode = BLKmode;

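        /* Offset of the save area from ARGBLOCK: the area lies below
           ARGBLOCK when arguments grow downward, above it otherwise.  */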
#ifdef ARGS_GROW_DOWNWARD
        delta = -high;
#else
        delta = low;
#endif
        stack_area = gen_rtx_MEM (save_mode,
                                  memory_address (save_mode,
                                                  plus_constant (argblock,
                                                                 delta)));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save, 0);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  enum machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx stack_area;

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;
#else
  delta = low_to_save;
#endif
  stack_area = gen_rtx_MEM (save_mode,
                            memory_address (save_mode,
                                            plus_constant (argblock, delta)));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && args[i].mode == BLKmode
        && MEM_P (args[i].value)
        && (MEM_ALIGN (args[i].value)
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

        if (args[i].partial)
          {
            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
          }
        else
          {
            args[i].n_aligned_regs
              = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
          }

        args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == downward)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

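        /* Copy the argument word by word: extract each word of the
           value and deposit it, shifted by any endian correction,
           into a fresh pseudo.  */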
        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, false, NULL_RTX,
                                      word_mode, word_mode);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, 0, 0,
                             word_mode, word);
          }
      }
}

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value, or null.

   FNDECL is the tree node for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */

static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree exp, tree struct_value_addr_value,
                                 tree fndecl, tree fntype,
                                 cumulative_args_t args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level, int *old_pending_adj,
                                 int *must_preallocate, int *ecf_flags,
                                 bool *may_tailcall, bool call_from_thunk_p)
{
  CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
  location_t loc = EXPR_LOCATION (exp);
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  int i;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;
      /* In this case, must reverse order of args
         so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }

  /* First fill in the actual arguments in the ARGS array, splitting
     complex arguments if necessary.  */
  {
    int j = i;
    call_expr_arg_iterator iter;
    tree arg;

    if (struct_value_addr_value)
      {
        args[j].tree_value = struct_value_addr_value;
        j += inc;
      }
    FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
      {
        tree argtype = TREE_TYPE (arg);
        if (targetm.calls.split_complex_arg
            && argtype
            && TREE_CODE (argtype) == COMPLEX_TYPE
            && targetm.calls.split_complex_arg (argtype))
          {
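            /* A split complex argument occupies two consecutive ARGS
               slots: the real part first, then the imaginary part.  */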
            tree subtype = TREE_TYPE (argtype);
            args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
            j += inc;
            args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
          }
        else
          args[j].tree_value = arg;
        j += inc;
      }
  }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
    {
      tree type = TREE_TYPE (args[i].tree_value);
      int unsignedp;
      enum machine_mode mode;

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union or record, pass things the way
         we would pass the first field of the union or record.  We have
         already verified that the modes are the same.  */
      if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
           && TYPE_TRANSPARENT_AGGR (type))
        type = TREE_TYPE (first_field (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many bytes are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
                             type, argpos < n_named_args))
        {
          bool callee_copies;
          tree base = NULL_TREE;

          callee_copies
            = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
                                       type, argpos < n_named_args);

          /* If we're compiling a thunk, pass through invisible references
             instead of making a copy.  */
          if (call_from_thunk_p
              || (callee_copies
                  && !TREE_ADDRESSABLE (type)
                  && (base = get_base_address (args[i].tree_value))
                  && TREE_CODE (base) != SSA_NAME
                  && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
            {
              mark_addressable (args[i].tree_value);

              /* We can't use sibcalls if a callee-copied argument is
                 stored in the current function's frame.  */
              if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
                *may_tailcall = false;

              args[i].tree_value = build_fold_addr_expr_loc (loc,
                                                         args[i].tree_value);
              type = TREE_TYPE (args[i].tree_value);

              if (*ecf_flags & ECF_CONST)
                *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
            }
          else
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
                  || (flag_stack_check == GENERIC_STACK_CHECK
                      && compare_tree_int (TYPE_SIZE_UNIT (type),
                                           STACK_CHECK_MAX_VAR_SIZE) > 0))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (args[i].tree_value);

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  /* We can pass TRUE as the 4th argument because we just
                     saved the stack pointer and will restore it right after
                     the call.  */
                  copy = allocate_dynamic_stack_space (size_rtx,
                                                       TYPE_ALIGN (type),
                                                       TYPE_ALIGN (type),
                                                       true);
                  copy = gen_rtx_MEM (BLKmode, copy);
                  set_mem_attributes (copy, type, 1);
                }
              else
                copy = assign_temp (type, 0, 1, 0);

              store_expr (args[i].tree_value, copy, 0, false);

              /* Just change the const function to pure and then let
                 the next test clear the pure based on
                 callee_copies.  */
              if (*ecf_flags & ECF_CONST)
                {
                  *ecf_flags &= ~ECF_CONST;
                  *ecf_flags |= ECF_PURE;
                }

              if (!callee_copies && *ecf_flags & ECF_PURE)
                *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

              args[i].tree_value
                = build_fold_addr_expr_loc (loc, make_tree (type, copy));
              type = TREE_TYPE (args[i].tree_value);
              *may_tailcall = false;
            }
        }

      unsignedp = TYPE_UNSIGNED (type);
      mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
                                    fndecl ? TREE_TYPE (fndecl) : fntype, 0);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
                                                argpos < n_named_args);

      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  */
      if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
        args[i].tail_call_reg
          = targetm.calls.function_incoming_arg (args_so_far, mode, type,
                                                 argpos < n_named_args);
      else
        args[i].tail_call_reg = args[i].reg;

      if (args[i].reg)
        args[i].partial
          = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
                                             argpos < n_named_args);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = 1;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
                                          type, argpos < n_named_args);
    }
}

/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             tree fndecl ATTRIBUTE_UNUSED,
                             tree fntype ATTRIBUTE_UNUSED,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        {
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
          args_size->var = round_up (args_size->var, preferred_stack_boundary);
        }

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
            args_size->var
              = size_binop (MINUS_EXPR, args_size->var,
                            ssize_int (reg_parm_stack_space));
        }
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
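      /* Round up so that the argument block plus any pending stack
         adjustment comes out to a multiple of the boundary.  */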
      args_size->constant = (((args_size->constant
                               + stack_pointer_delta
                               + preferred_stack_boundary - 1)
                              / preferred_stack_boundary
                              * preferred_stack_boundary)
                             - stack_pointer_delta);

      args_size->constant = MAX (args_size->constant,
                                 reg_parm_stack_space);

      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
        args_size->constant -= reg_parm_stack_space;
    }
  return unadjusted_args_size;
}

/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (We have code to avoid
     such a case by saving the outgoing stack arguments, but it results in
     worse code.)  */
  if (!ACCUMULATE_OUTGOING_ARGS)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      tree type;
      enum machine_mode mode;

      if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
        continue;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      type = TREE_TYPE (args[i].tree_value);
      gcc_assert (!TREE_ADDRESSABLE (type));

      args[i].initial_value = args[i].value
        = expand_normal (args[i].tree_value);

      mode = TYPE_MODE (type);
      if (mode != args[i].mode)
        {
          int unsignedp = args[i].unsignedp;
          args[i].value
            = convert_modes (args[i].mode, mode,
                             args[i].value, args[i].unsignedp);

          /* CSE will replace this only if it contains args[i].value
             pseudo, so convert it down to the declared mode using
             a SUBREG.  */
          if (REG_P (args[i].value)
              && GET_MODE_CLASS (args[i].mode) == MODE_INT
              && promote_mode (type, mode, &unsignedp) != args[i].mode)
            {
              args[i].initial_value
                = gen_lowpart_SUBREG (mode, args[i].value);
              SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
                                            args[i].unsignedp);
            }
        }
    }
}

/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals,
                           struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
        {
          if (args[i].partial > 0 && ! args[i].pass_on_stack)
            partial_seen = 1;
          else if (partial_seen && args[i].reg == 0)
            must_preallocate = 1;

          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  || TREE_CODE (args[i].tree_value) == COND_EXPR
                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
            copy_to_evaluate_size
              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        }

      if (copy_to_evaluate_size * 2 >= args_size->constant
          && args_size->constant > 0)
        must_preallocate = 1;
    }
  return must_preallocate;
}

/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

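      /* ARGBLOCK may already be a (plus REG CONST); if so, split it
         so the constant part can be folded into each slot address
         computed below.  */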
      if (GET_CODE (argblock) == PLUS)
        arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
        {
          rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
          rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
          rtx addr;
          unsigned int align, boundary;
          unsigned int units_on_stack = 0;
          enum machine_mode partial_mode = VOIDmode;

          /* Skip this parm if it will not be passed on the stack.  */
          if (! args[i].pass_on_stack
              && args[i].reg != 0
              && args[i].partial == 0)
            continue;

          if (CONST_INT_P (offset))
            addr = plus_constant (arg_reg, INTVAL (offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

          addr = plus_constant (addr, arg_offset);

          if (args[i].partial != 0)
            {
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              units_on_stack = args[i].locate.size.constant;
              partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
                                            MODE_INT, 1);
              args[i].stack = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack, units_on_stack);
            }
          else
            {
              args[i].stack = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack,
                                  TREE_TYPE (args[i].tree_value), 1);
            }
          align = BITS_PER_UNIT;
          boundary = args[i].locate.boundary;
          if (args[i].locate.where_pad != downward)
            align = boundary;
          else if (CONST_INT_P (offset))
            {
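              /* ALIGN & -ALIGN isolates the lowest set bit: the largest
                 power of two that divides both the byte offset and the
                 slot boundary.  */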
 | 
      
         | 1591 |  |  |               align = INTVAL (offset) * BITS_PER_UNIT | boundary;
 | 
      
         | 1592 |  |  |               align = align & -align;
 | 
      
         | 1593 |  |  |             }
 | 
      
         | 1594 |  |  |           set_mem_align (args[i].stack, align);

          if (CONST_INT_P (slot_offset))
            addr = plus_constant (arg_reg, INTVAL (slot_offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

          addr = plus_constant (addr, arg_offset);

          if (args[i].partial != 0)
            {
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack_slot, units_on_stack);
            }
          else
            {
              args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack_slot,
                                  TREE_TYPE (args[i].tree_value), 1);
            }
          set_mem_align (args[i].stack_slot, args[i].locate.boundary);

          /* Function incoming arguments may overlap with sibling call
             outgoing arguments and we cannot allow reordering of reads
             from function arguments with stores to outgoing arguments
             of sibling calls.  */
          set_mem_alias_set (args[i].stack, 0);
          set_mem_alias_set (args[i].stack_slot, 0);
        }
    }
}

/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
         make an external definition for it.  */
      if (!TREE_USED (fndecl) && fndecl != current_function_decl)
        {
          assemble_external (fndecl);
          TREE_USED (fndecl) = 1;
        }

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_normal (addr);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
    }
  return funexp;
}
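
/* For example: a direct call such as "foo ()" yields the SYMBOL_REF taken
   from foo's DECL_RTL above, while an indirect call such as "(*fp) ()"
   yields whatever expand_normal produces for FP, typically a pseudo
   register holding the function pointer.  (Illustrative only.)  */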

/* Internal state for internal_arg_pointer_based_exp and its helpers.  */
static struct
{
  /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
     or NULL_RTX if none has been scanned yet.  */
  rtx scan_start;
  /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
     based on crtl->args.internal_arg_pointer.  The element is NULL_RTX if the
     pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
     with fixed offset, or PC if this is with variable or unknown offset.  */
  VEC(rtx, heap) *cache;
} internal_arg_pointer_exp_state;
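
/* For example (register numbers invented for illustration): after scanning
   "(set (reg 70) (plus (reg iap) (const_int 16)))", where IAP is
   crtl->args.internal_arg_pointer, the cache slot for pseudo 70 holds
   (const_int 16).  After a later "(set (reg 71) (plus (reg 70) (reg 72)))"
   the slot for pseudo 71 holds PC, because the offset is variable; a
   pseudo that is set more than once is likewise pessimized to PC.  */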

static rtx internal_arg_pointer_based_exp (rtx, bool);

/* Helper function for internal_arg_pointer_based_exp.  Scan insns in
   the tail call sequence, starting with the first insn that hasn't been
   scanned yet, and note for each pseudo on the LHS whether it is based
   on crtl->args.internal_arg_pointer or not, and what offset from that
   pointer it has.  */

static void
internal_arg_pointer_based_exp_scan (void)
{
  rtx insn, scan_start = internal_arg_pointer_exp_state.scan_start;

  if (scan_start == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (scan_start);

  while (insn)
    {
      rtx set = single_set (insn);
      if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
        {
          rtx val = NULL_RTX;
          unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
          /* Punt on pseudos set multiple times.  */
          if (idx < VEC_length (rtx, internal_arg_pointer_exp_state.cache)
              && (VEC_index (rtx, internal_arg_pointer_exp_state.cache, idx)
                  != NULL_RTX))
            val = pc_rtx;
          else
            val = internal_arg_pointer_based_exp (SET_SRC (set), false);
          if (val != NULL_RTX)
            {
              if (idx
                  >= VEC_length (rtx, internal_arg_pointer_exp_state.cache))
                VEC_safe_grow_cleared (rtx, heap,
                                       internal_arg_pointer_exp_state.cache,
                                       idx + 1);
              VEC_replace (rtx, internal_arg_pointer_exp_state.cache,
                           idx, val);
            }
        }
      if (NEXT_INSN (insn) == NULL_RTX)
        scan_start = insn;
      insn = NEXT_INSN (insn);
    }

  internal_arg_pointer_exp_state.scan_start = scan_start;
}

/* Helper function for internal_arg_pointer_based_exp, called through
   for_each_rtx.  Return 1 if *LOC is a register based on
   crtl->args.internal_arg_pointer.  Return -1 if *LOC is not based on it
   and the subexpressions need not be examined.  Otherwise return 0.  */

static int
internal_arg_pointer_based_exp_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  if (REG_P (*loc) && internal_arg_pointer_based_exp (*loc, false) != NULL_RTX)
    return 1;
  if (MEM_P (*loc))
    return -1;
  return 0;
}

/* Compute whether RTL is based on crtl->args.internal_arg_pointer.  Return
   NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
   it with fixed offset, or PC if this is with variable or unknown offset.
   TOPLEVEL is true if the function is invoked at the topmost level.  */

static rtx
internal_arg_pointer_based_exp (rtx rtl, bool toplevel)
{
  if (CONSTANT_P (rtl))
    return NULL_RTX;

  if (rtl == crtl->args.internal_arg_pointer)
    return const0_rtx;

  if (REG_P (rtl) && HARD_REGISTER_P (rtl))
    return NULL_RTX;

  if (GET_CODE (rtl) == PLUS && CONST_INT_P (XEXP (rtl, 1)))
    {
      rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
      if (val == NULL_RTX || val == pc_rtx)
        return val;
      return plus_constant (val, INTVAL (XEXP (rtl, 1)));
    }

  /* When called at the topmost level, scan pseudo assignments in between the
     last scanned instruction in the tail call sequence and the latest insn
     in that sequence.  */
  if (toplevel)
    internal_arg_pointer_based_exp_scan ();

  if (REG_P (rtl))
    {
      unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
      if (idx < VEC_length (rtx, internal_arg_pointer_exp_state.cache))
        return VEC_index (rtx, internal_arg_pointer_exp_state.cache, idx);

      return NULL_RTX;
    }

  if (for_each_rtx (&rtl, internal_arg_pointer_based_exp_1, NULL))
    return pc_rtx;

  return NULL_RTX;
}
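
/* For illustration (rtl invented for this comment): given
   "(plus (plus (reg iap) (const_int 8)) (const_int 4))", where IAP is
   crtl->args.internal_arg_pointer, the PLUS case above recurses on the
   inner PLUS to obtain (const_int 8) and then folds in the outer offset
   with plus_constant, yielding (const_int 12).  */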

/* Return true if and only if SIZE storage units (usually bytes)
   starting from address ADDR overlap with the already-clobbered argument
   area.  This function is used to determine whether we should give up
   on a sibcall.  */

static bool
mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
{
  HOST_WIDE_INT i;
  rtx val;

  if (sbitmap_empty_p (stored_args_map))
    return false;
  val = internal_arg_pointer_based_exp (addr, true);
  if (val == NULL_RTX)
    return false;
  else if (val == pc_rtx)
    return true;
  else
    i = INTVAL (val);
#ifdef STACK_GROWS_DOWNWARD
  i -= crtl->args.pretend_args_size;
#else
  i += crtl->args.pretend_args_size;
#endif

#ifdef ARGS_GROW_DOWNWARD
  i = -i - size;
#endif
  if (size > 0)
    {
      unsigned HOST_WIDE_INT k;

      for (k = 0; k < size; k++)
        if (i + k < stored_args_map->n_bits
            && TEST_BIT (stored_args_map, i + k))
          return true;
    }

  return false;
}
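
/* For illustration (numbers invented for this comment): assuming upward
   growing argument slots and no pretend args, if ADDR resolves to a fixed
   offset of 8 from the incoming argument pointer and SIZE is 4, the loop
   above tests bits 8 through 11 of stored_args_map; the access overlaps
   iff one of those argument bytes was already overwritten by a tail-call
   argument store.  */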

/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

static void
load_register_parameters (struct arg_data *args, int num_actuals,
                          rtx *call_fusage, int flags, int is_sibcall,
                          int *sibcall_failure)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    {
      rtx reg = ((flags & ECF_SIBCALL)
                 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
        {
          int partial = args[i].partial;
          int nregs;
          int size = 0;
          rtx before_arg = get_last_insn ();
          /* Set NREGS non-negative if we must move a word at a time, even
             if just one word (e.g., partial == 4 && mode == DFmode).  Set
             it to -1 if we just use a normal move insn.  This value can be
             zero if the argument is a zero-sized structure.  */
          nregs = -1;
          if (GET_CODE (reg) == PARALLEL)
            ;
          else if (partial)
            {
              gcc_assert (partial % UNITS_PER_WORD == 0);
              nregs = partial / UNITS_PER_WORD;
            }
          else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
            {
              size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
              nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
            }
          else
            size = GET_MODE_SIZE (args[i].mode);

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */

          if (GET_CODE (reg) == PARALLEL)
            emit_group_move (reg, args[i].parallel_value);

          /* In the simple case, just do the move.  For a normal partial
             argument, store_one_arg has already loaded the register for
             us.  In all other cases, load the register(s) from memory.  */

          else if (nregs == -1)
            {
              emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
              /* Handle the case where we have a value that needs shifting
                 up to the msb, e.g. a QImode value being padded upward on
                 a BYTES_BIG_ENDIAN machine.  */
              if (size < UNITS_PER_WORD
                  && (args[i].locate.where_pad
                      == (BYTES_BIG_ENDIAN ? upward : downward)))
                {
                  rtx x;
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

                  /* Assigning REG here rather than a temp makes CALL_FUSAGE
                     report the whole reg as used.  Strictly speaking, the
                     call only uses SIZE bytes at the msb end, but it doesn't
                     seem worth generating rtl to say that.  */
                  reg = gen_rtx_REG (word_mode, REGNO (reg));
                  x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
                  if (x != reg)
                    emit_move_insn (reg, x);
                }
#endif
            }

          /* If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  */

          else if (args[i].n_aligned_regs != 0)
            for (j = 0; j < args[i].n_aligned_regs; j++)
              emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
                              args[i].aligned_regs[j]);

          else if (partial == 0 || args[i].pass_on_stack)
            {
              rtx mem = validize_mem (args[i].value);

              /* Check for overlap with the already-clobbered argument
                 area, provided this has a non-zero size.  */
              if (is_sibcall
                  && (size == 0
                      || mem_overlaps_already_clobbered_arg_p
                                           (XEXP (args[i].value, 0), size)))
                *sibcall_failure = 1;

              /* Handle a BLKmode that needs shifting.  */
              if (nregs == 1 && size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                  && args[i].locate.where_pad == downward
#else
                  && BYTES_BIG_ENDIAN
#endif
                 )
                {
                  rtx tem = operand_subword_force (mem, 0, args[i].mode);
                  rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
                  rtx x = gen_reg_rtx (word_mode);
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
                  enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
                                                        : LSHIFT_EXPR;

                  emit_move_insn (x, tem);
                  x = expand_shift (dir, word_mode, x, shift, ri, 1);
                  if (x != ri)
                    emit_move_insn (ri, x);
                }
              else
                move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
            }

          /* When a parameter is a block, and perhaps in other cases, it is
             possible that it did a load from an argument slot that was
             already clobbered.  */
          if (is_sibcall
              && check_sibcall_argument_overlap (before_arg, &args[i], 0))
            *sibcall_failure = 1;

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (reg) == PARALLEL)
            use_group_regs (call_fusage, reg);
          else if (nregs == -1)
            use_reg_mode (call_fusage, reg,
                          TYPE_MODE (TREE_TYPE (args[i].tree_value)));
          else if (nregs > 0)
            use_regs (call_fusage, REGNO (reg), nregs);
        }
    }
}
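
/* For illustration of the padding shifts above (values invented for this
   comment): on a big-endian target with UNITS_PER_WORD == 4, a QImode
   argument (SIZE == 1) padded upward must end up in the most significant
   byte of its word, so it is shifted left by (4 - 1) * 8 == 24 bits
   before the call.  */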

/* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we compute an adjustment to the stack pointer for an
   amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
   bytes.  Then, when the arguments are pushed the stack will be perfectly
   aligned.  ARGS_SIZE->CONSTANT is set to the number of bytes that should
   be popped after the call.  Returns the adjustment.  */

static int
combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
                                           struct args_size *args_size,
                                           unsigned int preferred_unit_stack_boundary)
{
  /* The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
  HOST_WIDE_INT adjustment;
  /* The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  */
  unsigned HOST_WIDE_INT unadjusted_alignment;

  unadjusted_alignment
    = ((stack_pointer_delta + unadjusted_args_size)
       % preferred_unit_stack_boundary);

  /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */

  /* Begin by trying to pop all the bytes.  */
  unadjusted_alignment
    = (unadjusted_alignment
       - (pending_stack_adjust % preferred_unit_stack_boundary));
  adjustment = pending_stack_adjust;
  /* Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  */
  if (preferred_unit_stack_boundary > 1)
    {
      if (unadjusted_alignment > 0)
        adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
      else
        adjustment += unadjusted_alignment;
    }

  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  */
  args_size->constant
    = pending_stack_adjust - adjustment + unadjusted_args_size;

  return adjustment;
}
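
/* A worked example with invented numbers: suppose stack_pointer_delta is 0,
   UNADJUSTED_ARGS_SIZE is 4, PENDING_STACK_ADJUST is 64 and the preferred
   boundary is 16 bytes.  UNADJUSTED_ALIGNMENT starts as (0 + 4) % 16 == 4
   and stays 4 after subtracting 64 % 16 == 0, so ADJUSTMENT becomes
   64 - (16 - 4) == 52.  Popping 52 bytes now and then pushing the 4 bytes
   of arguments leaves (0 - 52 + 4) % 16 == 0, i.e. a perfectly aligned
   stack, and ARGS_SIZE->CONSTANT is 64 - 52 + 4 == 16 bytes to pop after
   the call.  */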

/* Scan expression X to see whether it dereferences any argument slots
   already clobbered by tail-call arguments (as noted in the
   stored_args_map bitmap).  Return nonzero if X dereferences such an
   argument slot, zero otherwise.  */

static int
check_sibcall_argument_overlap_1 (rtx x)
{
  RTX_CODE code;
  int i, j;
  const char *fmt;

  if (x == NULL_RTX)
    return 0;

  code = GET_CODE (x);

  /* We need not check the operands of the CALL expression itself.  */
  if (code == CALL)
    return 0;

  if (code == MEM)
    return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
                                                 GET_MODE_SIZE (GET_MODE (x)));

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
        {
          if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
            return 1;
        }
      else if (*fmt == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
              return 1;
        }
    }
  return 0;
}

/* Scan the sequence of insns after INSN to see whether any of them
   dereferences an argument slot already clobbered by tail-call arguments
   (as noted in the stored_args_map bitmap).  If MARK_STORED_ARGS_MAP is
   nonzero, afterwards mark the stack slots for ARG in the stored_args_map
   bitmap (when ARG is passed in a register, MARK_STORED_ARGS_MAP should
   be 0).  Return nonzero if the sequence after INSN dereferences such an
   argument slot, zero otherwise.  */

static int
check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
{
  int low, high;

  if (insn == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (insn);

  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && check_sibcall_argument_overlap_1 (PATTERN (insn)))
      break;

  if (mark_stored_args_map)
    {
#ifdef ARGS_GROW_DOWNWARD
      low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
#else
      low = arg->locate.slot_offset.constant;
#endif

      for (high = low + arg->locate.size.constant; low < high; low++)
        SET_BIT (stored_args_map, low);
    }
  return insn != NULL_RTX;
}
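
/* For illustration (offsets invented for this comment): with upward
   growing argument slots, an argument whose slot offset is 16 and whose
   size is 8 marks bits 16 through 23 of stored_args_map, so a later
   sibcall argument setup that reads any of those incoming-argument bytes
   is detected as an overlap.  */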

/* Given that a function returns a value of mode MODE at the most
   significant end of hard register VALUE, shift VALUE left or right
   as specified by LEFT_P.  Return true if some action was needed.  */

bool
shift_return_value (enum machine_mode mode, bool left_p, rtx value)
{
  HOST_WIDE_INT shift;

  gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
  shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
  if (shift == 0)
    return false;

  /* Use ashr rather than lshr for right shifts.  This is for the benefit
     of the MIPS port, which requires SImode values to be sign-extended
     when stored in 64-bit registers.  */
  if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
                           value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
    gcc_unreachable ();
  return true;
}
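
/* For illustration: an SImode (32-bit) value parked at the most
   significant end of a DImode (64-bit) hard register is moved down with a
   shift of 64 - 32 == 32 bits; with LEFT_P false this is an arithmetic
   right shift, which also sign-extends the value as the MIPS port
   requires.  */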

/* If X is a likely-spilled register value, copy it to a pseudo
   register and return that register.  Return X otherwise.  */

static rtx
avoid_likely_spilled_reg (rtx x)
{
  rtx new_rtx;

  if (REG_P (x)
      && HARD_REGISTER_P (x)
      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
    {
      /* Make sure that we generate a REG rather than a CONCAT.
         Moves into CONCATs can need nontrivial instructions,
         and the whole point of this function is to avoid
         using the hard register directly in such a situation.  */
      generating_concat_p = 0;
      new_rtx = gen_reg_rtx (GET_MODE (x));
      generating_concat_p = 1;
      emit_move_insn (new_rtx, x);
      return new_rtx;
    }
  return x;
}

/* Generate all the code for a CALL_EXPR exp
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */

rtx
expand_call (tree exp, rtx target, int ignore)
{
  /* Nonzero if we are currently expanding a call.  */
  static int currently_expanding_call = 0;

  /* RTX for the function to be called.  */
  rtx funexp;
  /* Sequence of insns to perform a normal "call".  */
  rtx normal_call_insns = NULL_RTX;
  /* Sequence of insns to perform a tail "call".  */
  rtx tail_call_insns = NULL_RTX;
  /* Data type of the function.  */
  tree funtype;
  tree type_arg_types;
  tree rettype;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  /* The type of the function being called.  */
  tree fntype;
  bool try_tail_call = CALL_EXPR_TAILCALL (exp);
  int pass;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Holds the value of implicit argument for the struct value.  */
  tree structure_value_addr_value = NULL_TREE;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  HOST_WIDE_INT struct_value_size = 0;
  /* Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  int pcc_struct_value = 0;
  rtx struct_value = 0;

  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;
  /* Number of complex actual arguments that need to be split.  */
  int num_complex_actuals = 0;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  struct args_size adjusted_args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  int unadjusted_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;

  /* Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */

  int must_preallocate = !PUSH_ARGS;

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;

  /* Mask of ECF_ flags.  */
  int flags = 0;
#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save, high_to_save;
  rtx save_area = 0;		/* Place that it is saved */
#endif

  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  char *stack_usage_map_buf = NULL;

  int old_stack_allocated;

  /* State variables to track stack modifications.  */
  rtx old_stack_level = 0;
  int old_stack_arg_under_construction = 0;
  int old_pending_adj = 0;
  int old_inhibit_defer_pop = inhibit_defer_pop;

  /* Some stack pointer alterations we make are performed via
     allocate_dynamic_stack_space.  This modifies the stack_pointer_delta,
     which we then also need to save/restore along the way.  */
  int old_stack_pointer_delta = 0;

  rtx call_fusage;
  tree addr = CALL_EXPR_FN (exp);
  int i;
  /* The alignment of the stack, in bits.  */
  unsigned HOST_WIDE_INT preferred_stack_boundary;
  /* The alignment of the stack, in bytes.  */
  unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
  /* The static chain value to use for this call.  */
  rtx static_chain_value;
  /* See if this is a "nothrow" function call.  */
  if (TREE_NOTHROW (exp))
    flags |= ECF_NOTHROW;

  /* See if we can find a DECL-node for the actual function, and get the
     function attributes (flags) from the function decl or type node.  */
  fndecl = get_callee_fndecl (exp);
  if (fndecl)
    {
      fntype = TREE_TYPE (fndecl);
      flags |= flags_from_decl_or_type (fndecl);
    }
  else
    {
      fntype = TREE_TYPE (TREE_TYPE (addr));
      flags |= flags_from_decl_or_type (fntype);
    }
  rettype = TREE_TYPE (exp);

  struct_value = targetm.calls.struct_value_rtx (fntype, 0);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (rettype))
    warning (OPT_Waggregate_return, "function call has aggregate value");

  /* If the result of a non-looping pure or const function call is
     ignored (or void), and none of its arguments are volatile, we can
     avoid expanding the call and just evaluate the arguments for
     side-effects.  */
  if ((flags & (ECF_CONST | ECF_PURE))
      && (!(flags & ECF_LOOPING_CONST_OR_PURE))
      && (ignore || target == const0_rtx
          || TYPE_MODE (rettype) == VOIDmode))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
        if (TREE_THIS_VOLATILE (arg))
          {
            volatilep = true;
            break;
          }

      if (! volatilep)
        {
          FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
            expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return const0_rtx;
        }
    }
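
  /* For example (illustrative only): given
       extern int sq (int) __attribute__ ((const));
     a statement "sq (x++);" whose result is unused reaches the return
     above -- the call itself is dropped and only the side effect "x++"
     is expanded.  */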
 | 
      
         | 2345 |  |  |  
 | 
      
         | 2346 |  |  | #ifdef REG_PARM_STACK_SPACE
 | 
      
         | 2347 |  |  |   reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
 | 
      
         | 2348 |  |  | #endif
 | 
      
         | 2349 |  |  |  
 | 
      
         | 2350 |  |  |   if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
 | 
      
         | 2351 |  |  |       && reg_parm_stack_space > 0 && PUSH_ARGS)
 | 
      
         | 2352 |  |  |     must_preallocate = 1;
 | 
      
         | 2353 |  |  |  
 | 
      
         | 2354 |  |  |   /* Set up a place to return a structure.  */
 | 
      
         | 2355 |  |  |  
 | 
      
         | 2356 |  |  |   /* Cater to broken compilers.  */
 | 
      
         | 2357 |  |  |   if (aggregate_value_p (exp, fntype))
 | 
      
         | 2358 |  |  |     {
 | 
      
         | 2359 |  |  |       /* This call returns a big structure.  */
 | 
      
         | 2360 |  |  |       flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
 | 
      
         | 2361 |  |  |  
 | 
      
         | 2362 |  |  | #ifdef PCC_STATIC_STRUCT_RETURN
 | 
      
         | 2363 |  |  |       {
 | 
      
         | 2364 |  |  |         pcc_struct_value = 1;
 | 
      
         | 2365 |  |  |       }
 | 
      
         | 2366 |  |  | #else /* not PCC_STATIC_STRUCT_RETURN */
 | 
      
         | 2367 |  |  |       {
 | 
      
         | 2368 |  |  |         struct_value_size = int_size_in_bytes (rettype);
 | 
      
         | 2369 |  |  |  
 | 
      
         | 2370 |  |  |         if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
 | 
      
         | 2371 |  |  |           structure_value_addr = XEXP (target, 0);
 | 
      
         | 2372 |  |  |         else
 | 
      
         | 2373 |  |  |           {
 | 
      
         | 2374 |  |  |             /* For variable-sized objects, we must be called with a target
 | 
      
         | 2375 |  |  |                specified.  If we were to allocate space on the stack here,
 | 
      
         | 2376 |  |  |                we would have no way of knowing when to free it.  */
 | 
      
         | 2377 |  |  |             rtx d = assign_temp (rettype, 0, 1, 1);
 | 
      
         | 2378 |  |  |  
 | 
      
         | 2379 |  |  |             mark_temp_addr_taken (d);
 | 
      
         | 2380 |  |  |             structure_value_addr = XEXP (d, 0);
 | 
      
         | 2381 |  |  |             target = 0;
 | 
      
         | 2382 |  |  |           }
 | 
      
         | 2383 |  |  |       }
 | 
      
         | 2384 |  |  | #endif /* not PCC_STATIC_STRUCT_RETURN */
 | 
      
         | 2385 |  |  |     }
 | 
      
         | 2386 |  |  |  
 | 
      
         | 2387 |  |  |   /* Figure out the amount to which the stack should be aligned.  */
 | 
      
         | 2388 |  |  |   preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
 | 
      
         | 2389 |  |  |   if (fndecl)
 | 
      
         | 2390 |  |  |     {
 | 
      
         | 2391 |  |  |       struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
 | 
      
         | 2392 |  |  |       /* Without automatic stack alignment, we can't increase preferred
 | 
      
         | 2393 |  |  |          stack boundary.  With automatic stack alignment, it is
 | 
      
         | 2394 |  |  |          unnecessary since unless we can guarantee that all callers will
 | 
      
         | 2395 |  |  |          align the outgoing stack properly, callee has to align its
 | 
      
         | 2396 |  |  |          stack anyway.  */
 | 
      
         | 2397 |  |  |       if (i
 | 
      
         | 2398 |  |  |           && i->preferred_incoming_stack_boundary
 | 
      
         | 2399 |  |  |           && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
 | 
      
         | 2400 |  |  |         preferred_stack_boundary = i->preferred_incoming_stack_boundary;
 | 
      
         | 2401 |  |  |     }

  /* Operand 0 is a pointer-to-function; get the type of the function.  */
  funtype = TREE_TYPE (addr);
  gcc_assert (POINTER_TYPE_P (funtype));
  funtype = TREE_TYPE (funtype);

  /* Count whether there are actual complex arguments that need to be split
     into their real and imaginary parts.  Munge the type_arg_types
     appropriately here as well.  */
  if (targetm.calls.split_complex_arg)
    {
      call_expr_arg_iterator iter;
      tree arg;
      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
        {
          tree type = TREE_TYPE (arg);
          if (type && TREE_CODE (type) == COMPLEX_TYPE
              && targetm.calls.split_complex_arg (type))
            num_complex_actuals++;
        }
      type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
    }
  else
    type_arg_types = TYPE_ARG_TYPES (funtype);
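
  /* For illustration (not in the original source): on a target whose
     split_complex_arg hook accepts the type, a call

         void f (_Complex double z);
         f (z);

     passes its argument as if the declaration had been

         void f (double z_realpart, double z_imagpart);

     which is why NUM_COMPLEX_ACTUALS is added into the argument count
     below.  */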

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = 1;

  /* If struct_value_rtx is 0, it means pass the address
     as if it were an extra parameter.  Put the argument expression
     in structure_value_addr_value.  */
  if (structure_value_addr && struct_value == 0)
    {
      /* If structure_value_addr is a REG other than
         virtual_outgoing_args_rtx, we can always use it.  If it
         is not a REG, we must always copy it into a register.
         If it is virtual_outgoing_args_rtx, we must copy it to another
         register in some cases.  */
      rtx temp = (!REG_P (structure_value_addr)
                  || (ACCUMULATE_OUTGOING_ARGS
                      && stack_arg_under_construction
                      && structure_value_addr == virtual_outgoing_args_rtx)
                  ? copy_addr_to_reg (convert_memory_address
                                      (Pmode, structure_value_addr))
                  : structure_value_addr);

      structure_value_addr_value =
        make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
      structure_value_addr_parm = 1;
    }
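
  /* Illustrative sketch (not in the original source): with no dedicated
     struct-value register, a call

         struct big f (int x);
         b = f (1);

     passes its arguments as if the source had been

         void f (struct big *hidden_ret, int x);
         f (&b, 1);

     i.e. the return address is prepended as an ordinary parameter, which
     is what STRUCTURE_VALUE_ADDR_PARM records.  */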

  /* Count the arguments and set NUM_ACTUALS.  */
  num_actuals =
    call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;

  /* Compute number of named args.
     First, do a raw count of the args for INIT_CUMULATIVE_ARGS.  */

  if (type_arg_types != 0)
    n_named_args
      = (list_length (type_arg_types)
         /* Count the struct value address, if it is passed as a parm.  */
         + structure_value_addr_parm);
  else
    /* If we know nothing, treat all args as named.  */
    n_named_args = num_actuals;

  /* Start updating where the next arg would go.

     On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  The fourth argument in
     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
     or not.  */
  INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
  args_so_far = pack_cumulative_args (&args_so_far_v);

  /* Now possibly adjust the number of named args.
     Normally, don't include the last named arg if anonymous args follow.
     We do include the last named arg if
     targetm.calls.strict_argument_naming() returns nonzero.
     (If no anonymous args follow, the result of list_length is actually
     one too large.  This is harmless.)

     If targetm.calls.pretend_outgoing_varargs_named() returns
     nonzero, and targetm.calls.strict_argument_naming() returns zero,
     this machine will be able to place unnamed args that were passed
     in registers into the stack.  So treat all args as named.  This
     allows the insns emitted for a specific argument list to be
     independent of the function declaration.

     If targetm.calls.pretend_outgoing_varargs_named() returns zero,
     we do not have any reliable way to pass unnamed args in
     registers, so we must force them into memory.  */

  if (type_arg_types != 0
      && targetm.calls.strict_argument_naming (args_so_far))
    ;
  else if (type_arg_types != 0
           && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
    /* Don't include the last named arg.  */
    --n_named_args;
  else
    /* Treat all args as named.  */
    n_named_args = num_actuals;
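
  /* Worked example (illustrative, not in the original source): for

         int printf (const char *fmt, ...);
         printf ("%d %d\n", a, b);

     TYPE_ARG_TYPES lists only the type of FMT, so the raw count above is
     1, and A and B are anonymous.  For a non-variadic prototype the type
     list instead ends in a void sentinel, which is why the raw count is
     one too large in that case, as noted above.  */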

  /* Make a vector to hold all the information about each arg.  */
  args = XALLOCAVEC (struct arg_data, num_actuals);
  memset (args, 0, num_actuals * sizeof (struct arg_data));

  /* Build up entries in the ARGS array, compute the size of the
     arguments into ARGS_SIZE, etc.  */
  initialize_argument_information (num_actuals, args, &args_size,
                                   n_named_args, exp,
                                   structure_value_addr_value, fndecl, fntype,
                                   args_so_far, reg_parm_stack_space,
                                   &old_stack_level, &old_pending_adj,
                                   &must_preallocate, &flags,
                                   &try_tail_call, CALL_FROM_THUNK_P (exp));

  if (args_size.var)
    must_preallocate = 1;

  /* Now make final decision about preallocating stack space.  */
  must_preallocate = finalize_must_preallocate (must_preallocate,
                                                num_actuals, args,
                                                &args_size);

  /* If the structure value address will reference the stack pointer, we
     must stabilize it.  We don't need to do this if we know that we are
     not going to adjust the stack pointer in processing this call.  */

  if (structure_value_addr
      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
          || reg_mentioned_p (virtual_outgoing_args_rtx,
                              structure_value_addr))
      && (args_size.var
          || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
    structure_value_addr = copy_to_reg (structure_value_addr);

  /* Tail calls can make things harder to debug, and we've traditionally
     pushed these optimizations into -O2.  Don't try if we're already
     expanding a call, as that means we're an argument.  Don't try if
     there are cleanups, as we know there's code to follow the call.  */

  if (currently_expanding_call++ != 0
      || !flag_optimize_sibling_calls
      || args_size.var
      || dbg_cnt (tail_call) == false)
    try_tail_call = 0;

  /* Other reasons for tail call optimization to fail.  */
  if (
#ifdef HAVE_sibcall_epilogue
      !HAVE_sibcall_epilogue
#else
      1
#endif
      || !try_tail_call
      /* Doing sibling call optimization needs some work, since
         structure_value_addr can be allocated on the stack.
         It does not seem worth the effort since few optimizable
         sibling calls will return a structure.  */
      || structure_value_addr != NULL_RTX
#ifdef REG_PARM_STACK_SPACE
      /* If outgoing reg parm stack space changes, we cannot do a sibcall.  */
      || (OUTGOING_REG_PARM_STACK_SPACE (funtype)
          != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
      || (reg_parm_stack_space != REG_PARM_STACK_SPACE (fndecl))
#endif
      /* Check whether the target is able to optimize the call
         into a sibcall.  */
      || !targetm.function_ok_for_sibcall (fndecl, exp)
      /* Functions that do not return exactly once may not be sibcall
         optimized.  */
      || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
      || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
      /* If the called function is nested in the current one, it might access
         some of the caller's arguments, but could clobber them beforehand if
         the argument areas are shared.  */
      || (fndecl && decl_function_context (fndecl) == current_function_decl)
      /* If this function requires more stack slots than the current
         function, we cannot change it into a sibling call.
         crtl->args.pretend_args_size is not part of the
         stack allocated by our caller.  */
      || args_size.constant > (crtl->args.size
                               - crtl->args.pretend_args_size)
      /* If the callee pops its own arguments, then it must pop exactly
         the same number of arguments as the current function.  */
      || (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant)
          != targetm.calls.return_pops_args (current_function_decl,
                                             TREE_TYPE (current_function_decl),
                                             crtl->args.size))
      || !lang_hooks.decls.ok_for_sibcall (fndecl))
    try_tail_call = 0;
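
  /* Illustrative sketch (not in the original source): the classic case
     that survives all of the checks above is a plain tail call such as

         int g (int);
         int f (int x) { return g (x + 1); }

     whereas, e.g., returning a structure, calling a nested function, or
     needing more argument stack than F itself receives each forces the
     normal (pass == 1) expansion below.  */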

  /* Check if caller and callee disagree in promotion of function
     return value.  */
  if (try_tail_call)
    {
      enum machine_mode caller_mode, caller_promoted_mode;
      enum machine_mode callee_mode, callee_promoted_mode;
      int caller_unsignedp, callee_unsignedp;
      tree caller_res = DECL_RESULT (current_function_decl);

      caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
      caller_mode = DECL_MODE (caller_res);
      callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
      callee_mode = TYPE_MODE (TREE_TYPE (funtype));
      caller_promoted_mode
        = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
                                 &caller_unsignedp,
                                 TREE_TYPE (current_function_decl), 1);
      callee_promoted_mode
        = promote_function_mode (TREE_TYPE (funtype), callee_mode,
                                 &callee_unsignedp,
                                 funtype, 1);
      if (caller_mode != VOIDmode
          && (caller_promoted_mode != callee_promoted_mode
              || ((caller_mode != caller_promoted_mode
                   || callee_mode != callee_promoted_mode)
                  && (caller_unsignedp != callee_unsignedp
                      || GET_MODE_BITSIZE (caller_mode)
                         < GET_MODE_BITSIZE (callee_mode)))))
        try_tail_call = 0;
    }
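
  /* For example (illustrative, not in the original source): on a target
     that promotes small return values to full words, a caller returning
     int that tail-calls a callee returning short receives a promoted
     HImode value whose upper bits depend on the callee's signedness.
     When the two promotions cannot be shown to agree, the tail call is
     abandoned rather than risk returning a wrongly-extended value.  */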

  /* Ensure current function's preferred stack boundary is at least
     what we need.  Stack alignment may also increase preferred stack
     boundary.  */
  if (crtl->preferred_stack_boundary < preferred_stack_boundary)
    crtl->preferred_stack_boundary = preferred_stack_boundary;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;

  preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;

  /* We want to make two insn chains; one for a sibling call, the other
     for a normal call.  We will select one of the two chains after
     initial RTL generation is complete.  */
  for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
    {
      int sibcall_failure = 0;
      /* We want to emit any pending stack adjustments before the tail
         recursion "call".  That way we know any adjustment after the tail
         recursion call can be ignored if we indeed use the tail
         call expansion.  */
      int save_pending_stack_adjust = 0;
      int save_stack_pointer_delta = 0;
      rtx insns;
      rtx before_call, next_arg_reg, after_args;

      if (pass == 0)
        {
          /* State variables we need to save and restore between
             iterations.  */
          save_pending_stack_adjust = pending_stack_adjust;
          save_stack_pointer_delta = stack_pointer_delta;
        }
      if (pass)
        flags &= ~ECF_SIBCALL;
      else
        flags |= ECF_SIBCALL;

      /* Other state variables that we must reinitialize each time
         through the loop (that are not initialized by the loop itself).  */
      argblock = 0;
      call_fusage = 0;

      /* Start a new sequence for the normal call case.

         From this point on, if the sibling call fails, we want to set
         sibcall_failure instead of continuing the loop.  */
      start_sequence ();

      /* Don't let pending stack adjusts add up to too much.
         Also, do all pending adjustments now if there is any chance
         this might be a call to alloca or if we are expanding a sibling
         call sequence.
         Also do the adjustments before a throwing call, otherwise
         exception handling can fail; PR 19225.  */
      if (pending_stack_adjust >= 32
          || (pending_stack_adjust > 0
              && (flags & ECF_MAY_BE_ALLOCA))
          || (pending_stack_adjust > 0
              && flag_exceptions && !(flags & ECF_NOTHROW))
          || pass == 0)
        do_pending_stack_adjust ();
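
      /* Background note (illustrative, not in the original source): with
         deferred popping, argument-popping adjustments from earlier calls
         accumulate in PENDING_STACK_ADJUST instead of being emitted one
         per call, so a sequence like

             f (1); g (2); h (3);

         can pop all three argument blocks with a single stack adjustment.
         The test above bounds how much may stay pending and flushes it
         where correctness requires a precise stack pointer.  */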

      /* Precompute any arguments as needed.  */
      if (pass)
        precompute_arguments (num_actuals, args);

      /* Now we are about to start emitting insns that can be deleted
         if a libcall is deleted.  */
      if (pass && (flags & ECF_MALLOC))
        start_sequence ();

      if (pass == 0 && crtl->stack_protect_guard)
        stack_protect_epilogue ();

      adjusted_args_size = args_size;
      /* Compute the actual size of the argument block required.  The variable
         and constant sizes must be combined, the size may have to be rounded,
         and there may be a minimum required size.  When generating a sibcall
         pattern, do not round up, since we'll be re-using whatever space our
         caller provided.  */
      unadjusted_args_size
        = compute_argument_block_size (reg_parm_stack_space,
                                       &adjusted_args_size,
                                       fndecl, fntype,
                                       (pass == 0 ? 0
                                        : preferred_stack_boundary));

      old_stack_allocated = stack_pointer_delta - pending_stack_adjust;

      /* The argument block when performing a sibling call is the
         incoming argument block.  */
      if (pass == 0)
        {
          argblock = crtl->args.internal_arg_pointer;
          argblock
#ifdef STACK_GROWS_DOWNWARD
            = plus_constant (argblock, crtl->args.pretend_args_size);
#else
            = plus_constant (argblock, -crtl->args.pretend_args_size);
#endif
          stored_args_map = sbitmap_alloc (args_size.constant);
          sbitmap_zero (stored_args_map);
        }

      /* If we have no actual push instructions, or shouldn't use them,
         make space for all args right now.  */
      else if (adjusted_args_size.var != 0)
        {
          if (old_stack_level == 0)
            {
              emit_stack_save (SAVE_BLOCK, &old_stack_level);
              old_stack_pointer_delta = stack_pointer_delta;
              old_pending_adj = pending_stack_adjust;
              pending_stack_adjust = 0;
              /* stack_arg_under_construction says whether a stack arg is
                 being constructed at the old stack level.  Pushing the stack
                 gets a clean outgoing argument block.  */
              old_stack_arg_under_construction = stack_arg_under_construction;
              stack_arg_under_construction = 0;
            }
          argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
          if (flag_stack_usage_info)
            current_function_has_unbounded_dynamic_stack_size = 1;
        }
      else
        {
          /* Note that we must go through the motions of allocating an argument
             block even if the size is zero because we may be storing args
             in the area reserved for register arguments, which may be part of
             the stack frame.  */

          int needed = adjusted_args_size.constant;

          /* Store the maximum argument space used.  It will be pushed by
             the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
             checking).  */

          if (needed > crtl->outgoing_args_size)
            crtl->outgoing_args_size = needed;

          if (must_preallocate)
            {
              if (ACCUMULATE_OUTGOING_ARGS)
                {
                  /* Since the stack pointer will never be pushed, it is
                     possible for the evaluation of a parm to clobber
                     something we have already written to the stack.
                     Since most function calls on RISC machines do not use
                     the stack, this is uncommon, but must work correctly.

                     Therefore, we save any area of the stack that was already
                     written and that we are using.  Here we set up to do this
                     by making a new stack usage map from the old one.  The
                     actual save will be done by store_one_arg.

                     Another approach might be to try to reorder the argument
                     evaluations to avoid this conflicting stack usage.  */

                  /* Since we will be writing into the entire argument area,
                     the map must be allocated for its entire size, not just
                     the part that is the responsibility of the caller.  */
                  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
                    needed += reg_parm_stack_space;

#ifdef ARGS_GROW_DOWNWARD
                  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                                     needed + 1);
#else
                  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                                     needed);
#endif
                  free (stack_usage_map_buf);
                  stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
                  stack_usage_map = stack_usage_map_buf;

                  if (initial_highest_arg_in_use)
                    memcpy (stack_usage_map, initial_stack_usage_map,
                            initial_highest_arg_in_use);

                  if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
                    memset (&stack_usage_map[initial_highest_arg_in_use], 0,
                            (highest_outgoing_arg_in_use
                             - initial_highest_arg_in_use));
                  needed = 0;

                  /* The address of the outgoing argument list must not be
                     copied to a register here, because argblock would be left
                     pointing to the wrong place after the call to
                     allocate_dynamic_stack_space below.  */

                  argblock = virtual_outgoing_args_rtx;
                }
              else
                {
                  if (inhibit_defer_pop == 0)
                    {
                      /* Try to reuse some or all of the pending_stack_adjust
                         to get this space.  */
                      needed
                        = (combine_pending_stack_adjustment_and_call
                           (unadjusted_args_size,
                            &adjusted_args_size,
                            preferred_unit_stack_boundary));

                      /* combine_pending_stack_adjustment_and_call computes
                         an adjustment before the arguments are allocated.
                         Account for them and see whether or not the stack
                         needs to go up or down.  */
                      needed = unadjusted_args_size - needed;

                      if (needed < 0)
                        {
                          /* We're releasing stack space.  */
                          /* ??? We can avoid any adjustment at all if we're
                             already aligned.  FIXME.  */
                          pending_stack_adjust = -needed;
                          do_pending_stack_adjust ();
                          needed = 0;
                        }
                      else
                        /* We need to allocate space.  We'll do that in
                           push_block below.  */
                        pending_stack_adjust = 0;
                    }

                  /* Special case this because overhead of `push_block' in
                     this case is non-trivial.  */
                  if (needed == 0)
                    argblock = virtual_outgoing_args_rtx;
                  else
                    {
                      argblock = push_block (GEN_INT (needed), 0, 0);
#ifdef ARGS_GROW_DOWNWARD
                      argblock = plus_constant (argblock, needed);
#endif
                    }

                  /* We only really need to call `copy_to_reg' in the case
                     where push insns are going to be used to pass ARGBLOCK
                     to a function call in ARGS.  In that case, the stack
                     pointer changes value from the allocation point to the
                     call point, and hence the value of
                     VIRTUAL_OUTGOING_ARGS_RTX changes as well.  But might
                     as well always do it.  */
                  argblock = copy_to_reg (argblock);
                }
            }
        }
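
      /* Clarifying note (illustrative, not in the original source):
         STACK_USAGE_MAP is a simple byte map over the outgoing argument
         area; a nonzero byte means that byte of the area has already
         been written for this call.  store_one_arg consults it so that,
         when evaluating one argument clobbers bytes of another already
         in place, the overwritten region can be saved and restored.  */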

      if (ACCUMULATE_OUTGOING_ARGS)
        {
          /* The save/restore code in store_one_arg handles all
             cases except one: a constructor call (including a C
             function returning a BLKmode struct) to initialize
             an argument.  */
          if (stack_arg_under_construction)
            {
              rtx push_size
                = GEN_INT (adjusted_args_size.constant
                           + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
                                                              : TREE_TYPE (fndecl))) ? 0
                              : reg_parm_stack_space));
              if (old_stack_level == 0)
                {
                  emit_stack_save (SAVE_BLOCK, &old_stack_level);
                  old_stack_pointer_delta = stack_pointer_delta;
                  old_pending_adj = pending_stack_adjust;
                  pending_stack_adjust = 0;
                  /* stack_arg_under_construction says whether a stack
                     arg is being constructed at the old stack level.
                     Pushing the stack gets a clean outgoing argument
                     block.  */
                  old_stack_arg_under_construction
                    = stack_arg_under_construction;
                  stack_arg_under_construction = 0;
                  /* Make a new map for the new argument list.  */
                  free (stack_usage_map_buf);
                  stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
                  stack_usage_map = stack_usage_map_buf;
                  highest_outgoing_arg_in_use = 0;
                }
              /* We can pass TRUE as the 4th argument because we just
                 saved the stack pointer and will restore it right after
                 the call.  */
              allocate_dynamic_stack_space (push_size, 0,
                                            BIGGEST_ALIGNMENT, true);
            }

          /* If argument evaluation might modify the stack pointer,
             copy the address of the argument list to a register.  */
          for (i = 0; i < num_actuals; i++)
            if (args[i].pass_on_stack)
              {
                argblock = copy_addr_to_reg (argblock);
                break;
              }
        }

      compute_argument_addresses (args, argblock, num_actuals);

      /* If we push args individually in reverse order, perform stack alignment
         before the first push (the last arg).  */
      if (PUSH_ARGS_REVERSED && argblock == 0
          && adjusted_args_size.constant != unadjusted_args_size)
        {
          /* When the stack adjustment is pending, we get better code
             by combining the adjustments.  */
          if (pending_stack_adjust
              && ! inhibit_defer_pop)
            {
              pending_stack_adjust
                = (combine_pending_stack_adjustment_and_call
                   (unadjusted_args_size,
                    &adjusted_args_size,
                    preferred_unit_stack_boundary));
              do_pending_stack_adjust ();
            }
          else if (argblock == 0)
            anti_adjust_stack (GEN_INT (adjusted_args_size.constant
                                        - unadjusted_args_size));
        }
      /* Now that the stack is properly aligned, pops can't safely
         be deferred during the evaluation of the arguments.  */
      NO_DEFER_POP;

      /* Record the maximum pushed stack space size.  We need to delay
         doing it this far to take into account the optimization done
         by combine_pending_stack_adjustment_and_call.  */
      if (flag_stack_usage_info
          && !ACCUMULATE_OUTGOING_ARGS
          && pass
          && adjusted_args_size.var == 0)
        {
          int pushed = adjusted_args_size.constant + pending_stack_adjust;
          if (pushed > current_function_pushed_stack_size)
            current_function_pushed_stack_size = pushed;
        }

      funexp = rtx_for_function_call (fndecl, addr);

      /* Figure out the register where the value, if any, will come back.  */
      valreg = 0;
      if (TYPE_MODE (rettype) != VOIDmode
          && ! structure_value_addr)
        {
          if (pcc_struct_value)
            valreg = hard_function_value (build_pointer_type (rettype),
                                          fndecl, NULL, (pass == 0));
          else
            valreg = hard_function_value (rettype, fndecl, fntype,
                                          (pass == 0));

          /* If VALREG is a PARALLEL whose first member has a zero
             offset, use that.  This is for targets such as m68k that
             return the same value in multiple places.  */
          if (GET_CODE (valreg) == PARALLEL)
            {
              rtx elem = XVECEXP (valreg, 0, 0);
              rtx where = XEXP (elem, 0);
              rtx offset = XEXP (elem, 1);
              if (offset == const0_rtx
                  && GET_MODE (where) == GET_MODE (valreg))
                valreg = where;
            }
        }
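
      /* Illustrative sketch (not in the original source): such a
         PARALLEL has roughly the shape

             (parallel:SI [(expr_list (reg:SI %d0) (const_int 0))
                           (expr_list (reg:SI %a0) (const_int 0))])

         e.g. for an m68k configuration returning pointers in both %d0
         and %a0; the code above simply picks the first copy when its
         offset is zero and its mode matches.  */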

      /* Precompute all register parameters.  It isn't safe to compute anything
         once we have started filling any specific hard regs.  */
      precompute_register_parameters (num_actuals, args, &reg_parm_seen);

      if (CALL_EXPR_STATIC_CHAIN (exp))
        static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
      else
        static_chain_value = 0;

#ifdef REG_PARM_STACK_SPACE
      /* Save the fixed argument area if it's part of the caller's frame and
         is clobbered by argument setup for this call.  */
      if (ACCUMULATE_OUTGOING_ARGS && pass)
        save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
                                              &low_to_save, &high_to_save);
#endif

      /* Now store (and compute if necessary) all non-register parms.
         These come before register parms, since they can require block-moves,
         which could clobber the registers used for register parms.
         Parms which have partial registers are not stored here,
         but we do preallocate space here if they want that.  */

      for (i = 0; i < num_actuals; i++)
        {
          if (args[i].reg == 0 || args[i].pass_on_stack)
            {
              rtx before_arg = get_last_insn ();

              if (store_one_arg (&args[i], argblock, flags,
                                 adjusted_args_size.var != 0,
                                 reg_parm_stack_space)
                  || (pass == 0
                      && check_sibcall_argument_overlap (before_arg,
                                                         &args[i], 1)))
                sibcall_failure = 1;
            }

          if (args[i].stack)
            call_fusage
              = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                                   gen_rtx_USE (VOIDmode, args[i].stack),
                                   call_fusage);
        }

      /* If we have a parm that is passed in registers but not in memory
         and whose alignment does not permit a direct copy into registers,
         make a group of pseudos that correspond to each register that we
         will later fill.  */
      if (STRICT_ALIGNMENT)
        store_unaligned_arguments_into_pseudos (args, num_actuals);

      /* Now store any partially-in-registers parm.
         This is the last place a block-move can happen.  */
      if (reg_parm_seen)
        for (i = 0; i < num_actuals; i++)
          if (args[i].partial != 0 && ! args[i].pass_on_stack)
            {
              rtx before_arg = get_last_insn ();

              if (store_one_arg (&args[i], argblock, flags,
                                 adjusted_args_size.var != 0,
                                 reg_parm_stack_space)
                  || (pass == 0
                      && check_sibcall_argument_overlap (before_arg,
                                                         &args[i], 1)))
                sibcall_failure = 1;
            }

      /* If we pushed args in forward order, perform stack alignment
         after pushing the last arg.  */
      if (!PUSH_ARGS_REVERSED && argblock == 0)
        anti_adjust_stack (GEN_INT (adjusted_args_size.constant
                                    - unadjusted_args_size));

      /* If register arguments require space on the stack and stack space
         was not preallocated, allocate stack space here for arguments
         passed in registers.  */
      if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
          && !ACCUMULATE_OUTGOING_ARGS
          && must_preallocate == 0 && reg_parm_stack_space > 0)
        anti_adjust_stack (GEN_INT (reg_parm_stack_space));

      /* Pass the function the address in which to return a
         structure value.  */
      if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
        {
          structure_value_addr
            = convert_memory_address (Pmode, structure_value_addr);
          emit_move_insn (struct_value,
                          force_reg (Pmode,
                                     force_operand (structure_value_addr,
                                                    NULL_RTX)));

          if (REG_P (struct_value))
            use_reg (&call_fusage, struct_value);
        }

      after_args = get_last_insn ();
      funexp = prepare_call_address (fndecl, funexp, static_chain_value,
                                     &call_fusage, reg_parm_seen, pass == 0);

      load_register_parameters (args, num_actuals, &call_fusage, flags,
                                pass == 0, &sibcall_failure);

      /* Save a pointer to the last insn before the call, so that we can
         later safely search backwards to find the CALL_INSN.  */
      before_call = get_last_insn ();

      /* Set up next argument register.  For sibling calls on machines
         with register windows this should be the incoming register.  */
      if (pass == 0)
        next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
                                                            VOIDmode,
                                                            void_type_node,
                                                            true);
      else
        next_arg_reg = targetm.calls.function_arg (args_so_far,
                                                   VOIDmode, void_type_node,
                                                   true);

      /* All arguments and registers used for the call must be set up by
         now!  */

      /* Stack must be properly aligned now.  */
      gcc_assert (!pass
                  || !(stack_pointer_delta % preferred_unit_stack_boundary));

      /* Generate the actual call instruction.  */
      emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
                   adjusted_args_size.constant, struct_value_size,
                   next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
                   flags, args_so_far);
 | 
      
         | 3127 |  |  |  
 | 
      
         | 3128 |  |  |       /* If the call setup or the call itself overlaps with anything
 | 
      
         | 3129 |  |  |          of the argument setup we probably clobbered our call address.
 | 
      
         | 3130 |  |  |          In that case we can't do sibcalls.  */
 | 
      
         | 3131 |  |  |       if (pass == 0
 | 
      
         | 3132 |  |  |           && check_sibcall_argument_overlap (after_args, 0, 0))
 | 
      
         | 3133 |  |  |         sibcall_failure = 1;
 | 
      
         | 3134 |  |  |  
 | 
      
         | 3135 |  |  |       /* If a non-BLKmode value is returned at the most significant end
 | 
      
         | 3136 |  |  |          of a register, shift the register right by the appropriate amount
 | 
      
         | 3137 |  |  |          and update VALREG accordingly.  BLKmode values are handled by the
 | 
      
         | 3138 |  |  |          group load/store machinery below.  */
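      /* For example, on a hypothetical target where return_in_msb holds
         for QImode values and registers are four bytes wide, a returned
         byte occupies the high-order end of the register and would need
         a right shift of 24 bits before it can be used normally.  */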
      if (!structure_value_addr
          && !pcc_struct_value
          && TYPE_MODE (rettype) != BLKmode
          && targetm.calls.return_in_msb (rettype))
        {
          if (shift_return_value (TYPE_MODE (rettype), false, valreg))
            sibcall_failure = 1;
          valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
        }

      if (pass && (flags & ECF_MALLOC))
        {
          rtx temp = gen_reg_rtx (GET_MODE (valreg));
          rtx last, insns;

          /* The return value from a malloc-like function is a pointer.  */
          if (TREE_CODE (rettype) == POINTER_TYPE)
            mark_reg_pointer (temp, BIGGEST_ALIGNMENT);

          emit_move_insn (temp, valreg);

          /* The return value from a malloc-like function cannot alias
             anything else.  */
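          /* Attaching a REG_NOALIAS note here lets later alias analysis
             treat TEMP as pointing to fresh memory, distinct from every
             pre-existing object.  */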
          last = get_last_insn ();
          add_reg_note (last, REG_NOALIAS, temp);

          /* Write out the sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          valreg = temp;
        }

      /* For calls to `setjmp', etc., inform
         function.c:setjmp_warnings that it should complain if
         nonvolatile values are live.  For functions that cannot
         return, inform flow that control does not fall through.  */

      if ((flags & ECF_NORETURN) || pass == 0)
        {
          /* The barrier must be emitted
             immediately after the CALL_INSN.  Some ports emit more
             than just a CALL_INSN above, so we must search for it here.  */

          rtx last = get_last_insn ();
          while (!CALL_P (last))
            {
              last = PREV_INSN (last);
              /* There was no CALL_INSN?  */
              gcc_assert (last != before_call);
            }

          emit_barrier_after (last);

          /* Stack adjustments after a noreturn call are dead code.
             However when NO_DEFER_POP is in effect, we must preserve
             stack_pointer_delta.  */
          if (inhibit_defer_pop == 0)
            {
              stack_pointer_delta = old_stack_allocated;
              pending_stack_adjust = 0;
            }
        }

      /* If the value type is not void, return an rtx for the value.  */

      if (TYPE_MODE (rettype) == VOIDmode
          || ignore)
        target = const0_rtx;
      else if (structure_value_addr)
        {
          if (target == 0 || !MEM_P (target))
            {
              target
                = gen_rtx_MEM (TYPE_MODE (rettype),
                               memory_address (TYPE_MODE (rettype),
                                               structure_value_addr));
              set_mem_attributes (target, rettype, 1);
            }
        }
      else if (pcc_struct_value)
        {
          /* This is the special C++ case where we need to
             know what the true target was.  We take care to
             never use this value more than once in one expression.  */
          target = gen_rtx_MEM (TYPE_MODE (rettype),
                                copy_to_reg (valreg));
          set_mem_attributes (target, rettype, 1);
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (valreg) == PARALLEL)
        {
          if (target == 0)
            {
              /* This will only be assigned once, so it can be readonly.  */
              tree nt = build_qualified_type (rettype,
                                              (TYPE_QUALS (rettype)
                                               | TYPE_QUAL_CONST));

              target = assign_temp (nt, 0, 1, 1);
            }

          if (! rtx_equal_p (target, valreg))
            emit_group_store (target, valreg, rettype,
                              int_size_in_bytes (rettype));

          /* We cannot support sibling calls for this case.  */
          sibcall_failure = 1;
        }
      else if (target
               && GET_MODE (target) == TYPE_MODE (rettype)
               && GET_MODE (target) == GET_MODE (valreg))
        {
          bool may_overlap = false;

          /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
             reg to a plain register.  */
          if (!REG_P (target) || HARD_REGISTER_P (target))
            valreg = avoid_likely_spilled_reg (valreg);

          /* If TARGET is a MEM in the argument area, and we have
             saved part of the argument area, then we can't store
             directly into TARGET as it may get overwritten when we
             restore the argument save area below.  Don't work too
             hard though and simply force TARGET to a register if it
             is a MEM; the optimizer is quite likely to sort it out.  */
          if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
            for (i = 0; i < num_actuals; i++)
              if (args[i].save_area)
                {
                  may_overlap = true;
                  break;
                }

          if (may_overlap)
            target = copy_to_reg (valreg);
          else
            {
              /* TARGET and VALREG cannot be equal at this point
                 because the latter would not have
                 REG_FUNCTION_VALUE_P true, while the former would if
                 it were referring to the same register.

                 If they refer to the same register, this move will be
                 a no-op, except when function inlining is being
                 done.  */
              emit_move_insn (target, valreg);

              /* If we are setting a MEM, this code must be executed.
                 Since it is emitted after the call insn, sibcall
                 optimization cannot be performed in that case.  */
              if (MEM_P (target))
                sibcall_failure = 1;
            }
        }
      else if (TYPE_MODE (rettype) == BLKmode)
        {
          rtx val = valreg;
          if (GET_MODE (val) != BLKmode)
            val = avoid_likely_spilled_reg (val);
          target = copy_blkmode_from_reg (target, val, rettype);

          /* We cannot support sibling calls for this case.  */
          sibcall_failure = 1;
        }
      else
        target = copy_to_reg (avoid_likely_spilled_reg (valreg));

      /* If we promoted this return value, make the proper SUBREG.
         TARGET might be const0_rtx here, so be careful.  */
      if (REG_P (target)
          && TYPE_MODE (rettype) != BLKmode
          && GET_MODE (target) != TYPE_MODE (rettype))
        {
          tree type = rettype;
          int unsignedp = TYPE_UNSIGNED (type);
          int offset = 0;
          enum machine_mode pmode;

          /* Ensure we promote as expected, and get the new unsignedness.  */
          pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
                                         funtype, 1);
          gcc_assert (GET_MODE (target) == pmode);

          if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
              && (GET_MODE_SIZE (GET_MODE (target))
                  > GET_MODE_SIZE (TYPE_MODE (type))))
            {
              offset = GET_MODE_SIZE (GET_MODE (target))
                - GET_MODE_SIZE (TYPE_MODE (type));
              if (! BYTES_BIG_ENDIAN)
                offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
              else if (! WORDS_BIG_ENDIAN)
                offset %= UNITS_PER_WORD;
            }
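          /* As a worked example (hypothetical target): an SImode value
             promoted to a DImode register gives OFFSET == 8 - 4 == 4.
             When both WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN hold, neither
             adjustment above applies and the subword really does start at
             byte 4, the low-order half where the promoted bits live.  */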

          target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
          SUBREG_PROMOTED_VAR_P (target) = 1;
          SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
        }

      /* If the size of the args is variable or this was a constructor call
         for a stack argument, restore the saved stack-pointer value.  */

      if (old_stack_level)
        {
          rtx prev = get_last_insn ();

          emit_stack_restore (SAVE_BLOCK, old_stack_level);
          stack_pointer_delta = old_stack_pointer_delta;

          fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);

          pending_stack_adjust = old_pending_adj;
          old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
          stack_arg_under_construction = old_stack_arg_under_construction;
          highest_outgoing_arg_in_use = initial_highest_arg_in_use;
          stack_usage_map = initial_stack_usage_map;
          sibcall_failure = 1;
        }
      else if (ACCUMULATE_OUTGOING_ARGS && pass)
        {
#ifdef REG_PARM_STACK_SPACE
          if (save_area)
            restore_fixed_argument_area (save_area, argblock,
                                         high_to_save, low_to_save);
#endif

          /* If we saved any argument areas, restore them.  */
          for (i = 0; i < num_actuals; i++)
            if (args[i].save_area)
              {
                enum machine_mode save_mode = GET_MODE (args[i].save_area);
                rtx stack_area
                  = gen_rtx_MEM (save_mode,
                                 memory_address (save_mode,
                                                 XEXP (args[i].stack_slot, 0)));

                if (save_mode != BLKmode)
                  emit_move_insn (stack_area, args[i].save_area);
                else
                  emit_block_move (stack_area, args[i].save_area,
                                   GEN_INT (args[i].locate.size.constant),
                                   BLOCK_OP_CALL_PARM);
              }

          highest_outgoing_arg_in_use = initial_highest_arg_in_use;
          stack_usage_map = initial_stack_usage_map;
        }

      /* If this was alloca, record the new stack level for nonlocal gotos.
         Check for the handler slots since we might not have a save area
         for non-local gotos.  */

      if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
        update_nonlocal_goto_save_area ();

      /* Free up storage we no longer need.  */
      for (i = 0; i < num_actuals; ++i)
        free (args[i].aligned_regs);

      insns = get_insns ();
      end_sequence ();

      if (pass == 0)
        {
          tail_call_insns = insns;

          /* Restore the pending stack adjustment now that we have
             finished generating the sibling call sequence.  */

          pending_stack_adjust = save_pending_stack_adjust;
          stack_pointer_delta = save_stack_pointer_delta;

          /* Prepare arg structure for next iteration.  */
          for (i = 0; i < num_actuals; i++)
            {
              args[i].value = 0;
              args[i].aligned_regs = 0;
              args[i].stack = 0;
            }

          sbitmap_free (stored_args_map);
          internal_arg_pointer_exp_state.scan_start = NULL_RTX;
          VEC_free (rtx, heap, internal_arg_pointer_exp_state.cache);
        }
      else
        {
          normal_call_insns = insns;

          /* Verify that we've deallocated all the stack we used.  */
          gcc_assert ((flags & ECF_NORETURN)
                      || (old_stack_allocated
                          == stack_pointer_delta - pending_stack_adjust));
        }

      /* If something prevents making this a sibling call,
         zero out the sequence.  */
      if (sibcall_failure)
        tail_call_insns = NULL_RTX;
      else
        break;
    }

  /* If tail call production succeeded, we need to remove REG_EQUIV notes on
     the arguments too, as the argument area is now clobbered by the call.  */
  if (tail_call_insns)
    {
      emit_insn (tail_call_insns);
      crtl->tail_call_emit = true;
    }
  else
    emit_insn (normal_call_insns);

  currently_expanding_call--;

  free (stack_usage_map_buf);

  return target;
}

/* A sibling call sequence invalidates any REG_EQUIV notes made for
   this function's incoming arguments.

   At the start of RTL generation we know the only REG_EQUIV notes
   in the rtl chain are those for incoming arguments, so we can look
   for REG_EQUIV notes between the start of the function and the
   NOTE_INSN_FUNCTION_BEG.

   This is (slight) overkill.  We could keep track of the highest
   argument we clobber and be more selective in removing notes, but it
   does not seem to be worth the effort.  */

void
fixup_tail_calls (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx note;

      /* There are never REG_EQUIV notes for the incoming arguments
         after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it.  */
      if (NOTE_P (insn)
          && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
        break;

      note = find_reg_note (insn, REG_EQUIV, 0);
      if (note)
        remove_note (insn, note);
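      /* Look again to check that no second REG_EQUIV note was attached;
         an insn should carry at most one.  */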
      note = find_reg_note (insn, REG_EQUIV, 0);
      gcc_assert (!note);
    }
}

/* Traverse a list of TYPES and expand all complex types into their
   components.  */
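/* For example, on a target whose split_complex_arg hook accepts
   "complex double", the parameter list (complex double, int) is
   rewritten as (double, double, int).  */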
static tree
split_complex_types (tree types)
{
  tree p;

  /* Before allocating memory, check for the common case of no complex.  */
  for (p = types; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_VALUE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        goto found;
    }
  return types;

 found:
  types = copy_list (types);

  for (p = types; p; p = TREE_CHAIN (p))
    {
      tree complex_type = TREE_VALUE (p);

      if (TREE_CODE (complex_type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (complex_type))
        {
          tree next, imag;

          /* Rewrite complex type with component type.  */
          TREE_VALUE (p) = TREE_TYPE (complex_type);
          next = TREE_CHAIN (p);

          /* Add another component type for the imaginary part.  */
          imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
          TREE_CHAIN (p) = imag;
          TREE_CHAIN (imag) = next;

          /* Skip the newly created node.  */
          p = TREE_CHAIN (p);
        }
    }

  return types;
}

/* Output a library call to function FUN (a SYMBOL_REF rtx).
   The RETVAL parameter specifies whether the return value needs to be
   saved; the other parameters are documented in the emit_library_call
   function below.  */

static rtx
emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
                           enum libcall_type fn_type,
                           enum machine_mode outmode, int nargs, va_list p)
{
  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  struct args_size original_args_size;
  int argnum;
  rtx fun;
  /* TODO: choose the correct decl type of orgfun.  Sadly that
     information isn't present here, so we default to the native
     calling ABI.  */
  tree fndecl ATTRIBUTE_UNUSED = NULL_TREE;
  tree fntype ATTRIBUTE_UNUSED = NULL_TREE;
  int inc;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  struct arg
  {
    rtx value;
    enum machine_mode mode;
    rtx reg;
    int partial;
    struct locate_and_pad_arg_data locate;
    rtx save_area;
  };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  rtx call_fusage = 0;
  rtx mem_value = 0;
  rtx valreg;
  int pcc_struct_value = 0;
  int struct_value_size = 0;
  int flags;
  int reg_parm_stack_space = 0;
  int needed;
  rtx before_call;
  tree tfom;                    /* type_for_mode (outmode, 0) */

#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save = 0, high_to_save = 0;
  rtx save_area = 0;            /* Place that it is saved.  */
#endif

  /* Size of the stack reserved for parameter registers.  */
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  char *stack_usage_map_buf = NULL;

  rtx struct_value = targetm.calls.struct_value_rtx (0, 0);

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
#endif

  /* By default, library functions cannot throw.  */
  flags = ECF_NOTHROW;

  switch (fn_type)
    {
    case LCT_NORMAL:
      break;
    case LCT_CONST:
      flags |= ECF_CONST;
      break;
    case LCT_PURE:
      flags |= ECF_PURE;
      break;
    case LCT_NORETURN:
      flags |= ECF_NORETURN;
      break;
    case LCT_THROW:
      flags = ECF_NORETURN;
      break;
    case LCT_RETURNS_TWICE:
      flags = ECF_RETURNS_TWICE;
      break;
    }
  fun = orgfun;

  /* Ensure current function's preferred stack boundary is at least
     what we need.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* If this kind of value comes back in memory,
     decide where in memory it should come back.  */
  if (outmode != VOIDmode)
    {
      tfom = lang_hooks.types.type_for_mode (outmode, 0);
      if (aggregate_value_p (tfom, 0))
        {
#ifdef PCC_STATIC_STRUCT_RETURN
          rtx pointer_reg
            = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
          mem_value = gen_rtx_MEM (outmode, pointer_reg);
          pcc_struct_value = 1;
          if (value == 0)
            value = gen_reg_rtx (outmode);
#else /* not PCC_STATIC_STRUCT_RETURN */
          struct_value_size = GET_MODE_SIZE (outmode);
          if (value != 0 && MEM_P (value))
            mem_value = value;
          else
            mem_value = assign_temp (tfom, 0, 1, 1);
#endif
          /* This call returns a big structure; since the result is
             written to memory, the call can no longer be treated as
             const or pure.  */
          flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
        }
    }
  else
    tfom = void_type_node;

  /* ??? Unfinished: must pass the memory address as an argument.  */

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = XALLOCAVEC (struct arg, nargs + 1);
  memset (argvec, 0, (nargs + 1) * sizeof (struct arg));

#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
  INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
#else
  INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
#endif
  args_so_far = pack_cumulative_args (&args_so_far_v);

  args_size.constant = 0;
  args_size.var = 0;

  count = 0;

  push_temp_slots ();

  /* If there's a structure value address to be passed,
     either pass it in the special place, or pass it as an extra argument.  */
  if (mem_value && struct_value == 0 && ! pcc_struct_value)
    {
      rtx addr = XEXP (mem_value, 0);

      nargs++;

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (addr) && !MEM_P (addr)
          && !(CONSTANT_P (addr)
               && targetm.legitimate_constant_p (Pmode, addr)))
        addr = force_operand (addr, NULL_RTX);

      argvec[count].value = addr;
      argvec[count].mode = Pmode;
      argvec[count].partial = 0;

      argvec[count].reg = targetm.calls.function_arg (args_so_far,
                                                      Pmode, NULL_TREE, true);
      gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
                                                   NULL_TREE, 1) == 0);

      locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                           1,
#else
                           argvec[count].reg != 0,
#endif
                           0, NULL_TREE, &args_size, &argvec[count].locate);

      if (argvec[count].reg == 0 || argvec[count].partial != 0
          || reg_parm_stack_space > 0)
        args_size.constant += argvec[count].locate.size.constant;

      targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);

      count++;
    }

  for (; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = (enum machine_mode) va_arg (p, int);
      int unsigned_p = 0;

      /* We cannot convert the arg value to the mode the library wants here;
         we must do it earlier, where we know the signedness of the arg.  */
      gcc_assert (mode != BLKmode
                  && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (val) && !MEM_P (val)
          && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
        val = force_operand (val, NULL_RTX);

      if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
        {
          rtx slot;
          int must_copy
            = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);

          /* If this was a CONST function, it is now PURE since it now
             reads memory.  */
          if (flags & ECF_CONST)
            {
              flags &= ~ECF_CONST;
              flags |= ECF_PURE;
            }

          if (MEM_P (val) && !must_copy)
            {
              tree val_expr = MEM_EXPR (val);
              if (val_expr)
                mark_addressable (val_expr);
              slot = val;
            }
          else
            {
              slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
                                  0, 1, 1);
              emit_move_insn (slot, val);
            }

          call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                           gen_rtx_USE (VOIDmode, slot),
                                           call_fusage);
          if (must_copy)
            call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                             gen_rtx_CLOBBER (VOIDmode,
                                                              slot),
                                             call_fusage);

          mode = Pmode;
          val = force_operand (XEXP (slot, 0), NULL_RTX);
        }

      mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
      argvec[count].mode = mode;
      argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
      argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
                                                      NULL_TREE, true);

      argvec[count].partial
        = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);

      if (argvec[count].reg == 0
          || argvec[count].partial != 0
          || reg_parm_stack_space > 0)
        {
          locate_and_pad_parm (mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                               1,
#else
                               argvec[count].reg != 0,
#endif
                               argvec[count].partial,
                               NULL_TREE, &args_size, &argvec[count].locate);
          args_size.constant += argvec[count].locate.size.constant;
          gcc_assert (!argvec[count].locate.size.var);
        }
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        argvec[count].locate.where_pad =
          BLOCK_REG_PADDING (mode, NULL_TREE,
                             GET_MODE_SIZE (mode) <= UNITS_PER_WORD);
#endif

      targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
    }

  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

  original_args_size = args_size;
  args_size.constant = (((args_size.constant
                          + stack_pointer_delta
                          + STACK_BYTES - 1)
                          / STACK_BYTES
                          * STACK_BYTES)
                         - stack_pointer_delta);
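  /* This rounds the argument block up so that the stack pointer, after
     the pending delta and these arguments are applied, stays aligned to
     STACK_BYTES.  For example (illustrative values only), with
     STACK_BYTES == 16, args_size.constant == 20 and stack_pointer_delta
     == 8, the result is ((20 + 8 + 15) / 16) * 16 - 8 == 24, and
     8 + 24 == 32 is 16-byte aligned.  */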

  args_size.constant = MAX (args_size.constant,
                            reg_parm_stack_space);

  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
    args_size.constant -= reg_parm_stack_space;

  if (args_size.constant > crtl->outgoing_args_size)
    crtl->outgoing_args_size = args_size.constant;

  if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
    {
      int pushed = args_size.constant + pending_stack_adjust;
      if (pushed > current_function_pushed_stack_size)
        current_function_pushed_stack_size = pushed;
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* Since the stack pointer will never be pushed, it is possible for
         the evaluation of a parm to clobber something we have already
         written to the stack.  Since most function calls on RISC machines
         do not use the stack, this is uncommon, but must work correctly.

         Therefore, we save any area of the stack that was already written
         and that we are using.  Here we set up to do this by making a new
         stack usage map from the old one.

         Another approach might be to try to reorder the argument
         evaluations to avoid this conflicting stack usage.  */

      needed = args_size.constant;

      /* Since we will be writing into the entire argument area, the
         map must be allocated for its entire size, not just the part that
         is the responsibility of the caller.  */
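      /* Each entry of stack_usage_map covers one byte of the outgoing
         argument area; a nonzero byte means that part of the area has
         already been written and must be saved before being reused.  */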
      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
        needed += reg_parm_stack_space;

#ifdef ARGS_GROW_DOWNWARD
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                         needed + 1);
#else
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                         needed);
#endif
      stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
      stack_usage_map = stack_usage_map_buf;

      if (initial_highest_arg_in_use)
        memcpy (stack_usage_map, initial_stack_usage_map,
                initial_highest_arg_in_use);

      if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
        memset (&stack_usage_map[initial_highest_arg_in_use], 0,
                highest_outgoing_arg_in_use - initial_highest_arg_in_use);
      needed = 0;

      /* We must be careful to use virtual regs before they're instantiated,
         and real regs afterwards.  Loop optimization, for example, can create
         new libcalls after we've instantiated the virtual regs, and if we
         use virtuals anyway, they won't match the rtl patterns.  */

      if (virtuals_instantiated)
        argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
      else
        argblock = virtual_outgoing_args_rtx;
    }
  else
    {
      if (!PUSH_ARGS)
        argblock = push_block (GEN_INT (args_size.constant), 0, 0);
    }

  /* If we push args individually in reverse order, perform stack alignment
     before the first push (the last arg).  */
  if (argblock == 0 && PUSH_ARGS_REVERSED)
    anti_adjust_stack (GEN_INT (args_size.constant
                                - original_args_size.constant));
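  /* Continuing the earlier illustration: if a 20-byte argument block was
     rounded up to 24, this pre-adjustment allocates the 4 bytes of
     padding first, so the pushes that follow leave the stack aligned.  */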

  if (PUSH_ARGS_REVERSED)
    {
      inc = -1;
      argnum = nargs - 1;
    }
  else
    {
      inc = 1;
      argnum = 0;
    }

#ifdef REG_PARM_STACK_SPACE
  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* The argument list is the property of the called routine and it
         may clobber it.  If the fixed area has been used for previous
         parameters, we must save and restore it.  */
      save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
                                            &low_to_save, &high_to_save);
    }
#endif

  /* Push the args that need to be pushed.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      enum machine_mode mode = argvec[argnum].mode;
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;
      unsigned int parm_align = argvec[argnum].locate.boundary;
      int lower_bound = 0, upper_bound = 0, i;

      if (! (reg != 0 && partial == 0))
        {
          rtx use;

          if (ACCUMULATE_OUTGOING_ARGS)
            {
              /* If this is being stored into a pre-allocated, fixed-size,
                 stack area, save any previous data at that location.  */

#ifdef ARGS_GROW_DOWNWARD
              /* stack_slot is negative, but we want to index stack_usage_map
                 with positive values.  */
              upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
              lower_bound = upper_bound - argvec[argnum].locate.size.constant;
#else
              lower_bound = argvec[argnum].locate.slot_offset.constant;
              upper_bound = lower_bound + argvec[argnum].locate.size.constant;
#endif
 | 
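              /* Illustrative example: with ARGS_GROW_DOWNWARD, a slot at
                 offset -8 of size 4 gives upper_bound 9 and lower_bound 5,
                 so the slot occupies stack_usage_map[5..8].  */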
      

              i = lower_bound;
              /* Don't worry about things in the fixed argument area;
                 it has already been saved.  */
              if (i < reg_parm_stack_space)
                i = reg_parm_stack_space;
              while (i < upper_bound && stack_usage_map[i] == 0)
                i++;

              if (i < upper_bound)
                {
                  /* We need to make a save area.  */
                  unsigned int size
                    = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
                  enum machine_mode save_mode
                    = mode_for_size (size, MODE_INT, 1);
                  rtx adr
                    = plus_constant (argblock,
                                     argvec[argnum].locate.offset.constant);
                  rtx stack_area
                    = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));

                  if (save_mode == BLKmode)
                    {
                      argvec[argnum].save_area
                        = assign_stack_temp (BLKmode,
                                             argvec[argnum].locate.size.constant,
                                             0);

                      emit_block_move (validize_mem (argvec[argnum].save_area),
                                       stack_area,
                                       GEN_INT (argvec[argnum].locate.size.constant),
                                       BLOCK_OP_CALL_PARM);
                    }
                  else
                    {
                      argvec[argnum].save_area = gen_reg_rtx (save_mode);

                      emit_move_insn (argvec[argnum].save_area, stack_area);
                    }
                }
            }

          emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
                          partial, reg, 0, argblock,
                          GEN_INT (argvec[argnum].locate.offset.constant),
                          reg_parm_stack_space,
                          ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));

          /* Now mark the segment we just used.  */
          if (ACCUMULATE_OUTGOING_ARGS)
            for (i = lower_bound; i < upper_bound; i++)
              stack_usage_map[i] = 1;

          NO_DEFER_POP;

          /* Indicate argument access so that alias.c knows that these
             values are live.  */
          if (argblock)
            use = plus_constant (argblock,
                                 argvec[argnum].locate.offset.constant);
          else
            /* When arguments are pushed, trying to tell alias.c where
               exactly this argument is won't work, because the
               auto-increment causes confusion.  So we merely indicate
               that we access something with a known mode somewhere on
               the stack.  */
            use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                                gen_rtx_SCRATCH (Pmode));
          use = gen_rtx_MEM (argvec[argnum].mode, use);
          use = gen_rtx_USE (VOIDmode, use);
          call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
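          /* The USE expressions accumulated in CALL_FUSAGE above are
             attached to the call insn by emit_call_1, so later passes
             know the call reads these stack slots.  */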
      
        }
    }

  /* If we pushed args in forward order, perform stack alignment
     after pushing the last arg.  */
  if (argblock == 0 && !PUSH_ARGS_REVERSED)
    anti_adjust_stack (GEN_INT (args_size.constant
                                - original_args_size.constant));

  if (PUSH_ARGS_REVERSED)
    argnum = nargs - 1;
  else
    argnum = 0;

  fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);

  /* Now load any reg parms into their regs.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      enum machine_mode mode = argvec[argnum].mode;
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;
#ifdef BLOCK_REG_PADDING
      int size = 0;
#endif

      /* Handle calls that pass values in multiple non-contiguous
         locations.  The PA64 has examples of this for library calls.  */
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
      else if (reg != 0 && partial == 0)
        {
          emit_move_insn (reg, val);
#ifdef BLOCK_REG_PADDING
          size = GET_MODE_SIZE (argvec[argnum].mode);

          /* Copied from load_register_parameters.  */

          /* Handle the case where we have a value that needs shifting
             up to the msb, e.g. a QImode value being padded upward on
             a BYTES_BIG_ENDIAN machine.  */
          if (size < UNITS_PER_WORD
              && (argvec[argnum].locate.where_pad
                  == (BYTES_BIG_ENDIAN ? upward : downward)))
            {
              rtx x;
              int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

              /* Assigning REG here rather than a temp makes CALL_FUSAGE
                 report the whole reg as used.  Strictly speaking, the
                 call only uses SIZE bytes at the msb end, but it doesn't
                 seem worth generating rtl to say that.  */
              reg = gen_rtx_REG (word_mode, REGNO (reg));
              x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
              if (x != reg)
                emit_move_insn (reg, x);
            }
#endif
        }
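      /* Illustrative example of the shift above: a QImode (1-byte) value
         with UNITS_PER_WORD of 4 gives shift = (4 - 1) * 8 = 24, moving
         the byte into the most significant position of the word.  */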
      

      NO_DEFER_POP;
    }

  /* Any regs containing parms remain in use through the call.  */
  for (count = 0; count < nargs; count++)
    {
      rtx reg = argvec[count].reg;
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        use_group_regs (&call_fusage, reg);
      else if (reg != 0)
        {
          int partial = argvec[count].partial;
          if (partial)
            {
              int nregs;
              gcc_assert (partial % UNITS_PER_WORD == 0);
              nregs = partial / UNITS_PER_WORD;
              use_regs (&call_fusage, REGNO (reg), nregs);
            }
          else
            use_reg (&call_fusage, reg);
        }
    }

  /* Pass the function the address in which to return a structure value.  */
  if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
    {
      emit_move_insn (struct_value,
                      force_reg (Pmode,
                                 force_operand (XEXP (mem_value, 0),
                                                NULL_RTX)));
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
    }

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;
  valreg = (mem_value == 0 && outmode != VOIDmode
            ? hard_libcall_value (outmode, orgfun) : NULL_RTX);

  /* Stack must be properly aligned now.  */
  gcc_assert (!(stack_pointer_delta
                & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
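  /* Illustrative example: a PREFERRED_STACK_BOUNDARY of 128 bits is 16
     bytes, so the mask is 15 and the assertion checks that
     stack_pointer_delta is a multiple of 16.  */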
      

  before_call = get_last_insn ();

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */
  /* The return type is needed to decide how many bytes the function pops.
     Signedness plays no role in that, so for simplicity, we pretend it's
     always signed.  We also assume that the list of arguments passed has
     no impact, so we pretend it is unknown.  */

  emit_call_1 (fun, NULL,
               get_identifier (XSTR (orgfun, 0)),
               build_function_type (tfom, NULL_TREE),
               original_args_size.constant, args_size.constant,
               struct_value_size,
               targetm.calls.function_arg (args_so_far,
                                           VOIDmode, void_type_node, true),
               valreg,
               old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);

  /* Right-shift returned value if necessary.  */
  if (!pcc_struct_value
      && TYPE_MODE (tfom) != BLKmode
      && targetm.calls.return_in_msb (tfom))
    {
      shift_return_value (TYPE_MODE (tfom), false, valreg);
      valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
    }

  /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
     that it should complain if nonvolatile values are live.  For
     functions that cannot return, inform flow that control does not
     fall through.  */

  if (flags & ECF_NORETURN)
    {
      /* The barrier note must be emitted
         immediately after the CALL_INSN.  Some ports emit more than
         just a CALL_INSN above, so we must search for it here.  */

      rtx last = get_last_insn ();
      while (!CALL_P (last))
        {
          last = PREV_INSN (last);
          /* There was no CALL_INSN?  */
          gcc_assert (last != before_call);
        }

      emit_barrier_after (last);
    }

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;

  pop_temp_slots ();

  /* Copy the value to the right place.  */
  if (outmode != VOIDmode && retval)
    {
      if (mem_value)
        {
          if (value == 0)
            value = mem_value;
          if (value != mem_value)
            emit_move_insn (value, mem_value);
        }
      else if (GET_CODE (valreg) == PARALLEL)
        {
          if (value == 0)
            value = gen_reg_rtx (outmode);
          emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
        }
      else
        {
          /* Convert to the proper mode if a promotion has been active.  */
          if (GET_MODE (valreg) != outmode)
            {
              int unsignedp = TYPE_UNSIGNED (tfom);

              gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
                                                 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
                          == GET_MODE (valreg));
              valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
            }

          if (value != 0)
            emit_move_insn (value, valreg);
          else
            value = valreg;
        }
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
        restore_fixed_argument_area (save_area, argblock,
                                     high_to_save, low_to_save);
#endif

      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
        if (argvec[count].save_area)
          {
            enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
            rtx adr = plus_constant (argblock,
                                     argvec[count].locate.offset.constant);
            rtx stack_area = gen_rtx_MEM (save_mode,
                                          memory_address (save_mode, adr));

            if (save_mode == BLKmode)
              emit_block_move (stack_area,
                               validize_mem (argvec[count].save_area),
                               GEN_INT (argvec[count].locate.size.constant),
                               BLOCK_OP_CALL_PARM);
            else
              emit_move_insn (stack_area, argvec[count].save_area);
          }

      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  free (stack_usage_map_buf);

  return value;
}

/* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.

   FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
   `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
   other types of library calls.  */

void
emit_library_call (rtx orgfun, enum libcall_type fn_type,
                   enum machine_mode outmode, int nargs, ...)
{
  va_list p;

  va_start (p, nargs);
  emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
  va_end (p);
}
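/* For example, a target might emit a void libcall such as __clear_cache
   like this (an illustrative sketch; BEGIN and END are hypothetical
   Pmode rtx operands):

     emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__clear_cache"),
                        LCT_NORMAL, VOIDmode, 2, begin, Pmode, end, Pmode);  */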
      

/* Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  */

rtx
emit_library_call_value (rtx orgfun, rtx value,
                         enum libcall_type fn_type,
                         enum machine_mode outmode, int nargs, ...)
{
  rtx result;
  va_list p;

  va_start (p, nargs);
  result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
                                      nargs, p);
  va_end (p);

  return result;
}
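/* For example, a 32-bit multiply could be expanded as a libcall like
   this (an illustrative sketch; TARGET, OP0 and OP1 are hypothetical
   SImode rtx operands):

     rtx res = emit_library_call_value
       (gen_rtx_SYMBOL_REF (Pmode, "__mulsi3"), target, LCT_CONST,
        SImode, 2, op0, SImode, op1, SImode);  */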
      

/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   MAY_BE_ALLOCA nonzero (the ECF_MAY_BE_ALLOCA bit of FLAGS) says this
   could be a call to `alloca', so we must be careful about how the
   stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used when ACCUMULATE_OUTGOING_ARGS is in
   effect to indicate that we need not worry about saving and restoring
   the stack.

   FNDECL is the declaration of the function we are calling.

   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  */

static int
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
               int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;
      

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack area,
         save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
        {
#ifdef ARGS_GROW_DOWNWARD
          /* stack_slot is negative, but we want to index stack_usage_map
             with positive values.  */
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
          else
            upper_bound = 0;

          lower_bound = upper_bound - arg->locate.size.constant;
#else
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
          else
            lower_bound = 0;

          upper_bound = lower_bound + arg->locate.size.constant;
#endif

          i = lower_bound;
          /* Don't worry about things in the fixed argument area;
             it has already been saved.  */
          if (i < reg_parm_stack_space)
            i = reg_parm_stack_space;
          while (i < upper_bound && stack_usage_map[i] == 0)
            i++;

          if (i < upper_bound)
            {
              /* We need to make a save area.  */
              unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
              enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
              rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
              rtx stack_area = gen_rtx_MEM (save_mode, adr);

              if (save_mode == BLKmode)
                {
                  tree ot = TREE_TYPE (arg->tree_value);
                  tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
                                                       | TYPE_QUAL_CONST));

                  arg->save_area = assign_temp (nt, 0, 1, 1);
                  preserve_temp_slots (arg->save_area);
                  emit_block_move (validize_mem (arg->save_area), stack_area,
                                   GEN_INT (arg->locate.size.constant),
                                   BLOCK_OP_CALL_PARM);
                }
              else
                {
                  arg->save_area = gen_reg_rtx (save_mode);
                  emit_move_insn (arg->save_area, stack_area);
                }
            }
        }
    }
      

  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
        reg = arg->tail_call_reg;
      else
        reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
         being evaluated directly into the outgoing argument list and
         expand_call must take special action to preserve the argument list
         if it is called recursively.

         For scalar function arguments stack_usage_map is sufficient to
         determine which stack slots must be saved and restored.  Scalar
         arguments in general have pass_on_stack == 0.

         If this argument is initialized by a function which takes the
         address of the argument (a C++ constructor or a C function
         returning a BLKmode structure), then stack_usage_map is
         insufficient and expand_call must push the stack around the
         function call.  Such arguments have pass_on_stack == 1.

         Note that it is always safe to set stack_arg_under_construction,
         but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
        stack_arg_under_construction++;

      arg->value = expand_expr (pval,
                                (partial
                                 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
                                ? NULL_RTX : arg->stack,
                                VOIDmode, EXPAND_STACK_PARM);

      /* If we are promoting the object, or if for any other reason the
         mode doesn't agree, convert it now.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
        arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
                                    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
        stack_arg_under_construction--;
    }
      

  /* Check for overlap with already clobbered argument area.  */
  if ((flags & ECF_SIBCALL)
      && MEM_P (arg->value)
      && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
                                               arg->locate.size.constant))
    sibcall_failure = 1;

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;
      unsigned int parm_align;

      /* Argument is a scalar, not entirely passed in registers.
         (If part is passed in registers, arg->partial says how much
         and emit_push_insn will take care of putting it there.)

         Push it, and if its size is less than the
         amount of space allocated to it,
         also bump stack pointer by the additional space.
         Note that in C the default argument promotions
         will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
         On many machines, pushing a byte will advance the stack
         pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
         round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
        used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
                 / (PARM_BOUNDARY / BITS_PER_UNIT))
                * (PARM_BOUNDARY / BITS_PER_UNIT));
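      /* Illustrative example: SIZE of 1 with a PARM_BOUNDARY of 32 bits
         (4 bytes) rounds USED up to ((1 + 3) / 4) * 4 = 4 bytes.  */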
      

      /* Compute the alignment of the pushed argument.  */
      parm_align = arg->locate.boundary;
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
        {
          int pad = used - size;
          if (pad)
            {
              unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, pad_align);
            }
        }
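      /* In the pad_align computation above, (pad & -pad) isolates the
         lowest set bit of PAD; e.g. a pad of 12 bytes yields 4, so only
         4-byte (32-bit) alignment of the argument itself is guaranteed.
         (Illustrative.)  */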
      

      /* This isn't already where we want it on the stack, so put it there.
         This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
                      parm_align, partial, reg, used - size, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.  */
      if (partial == 0)
        arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
         If part is passed in registers, PARTIAL says how much
         and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
         of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
        {
          excess = 0;
          size_rtx = ARGS_SIZE_RTX (arg->locate.size);
        }
      else
        {
          /* PUSH_ROUNDING has no effect on us, because emit_push_insn
             for BLKmode is careful to avoid it.  */
          excess = (arg->locate.size.constant
                    - int_size_in_bytes (TREE_TYPE (pval))
                    + partial);
          size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
                                  NULL_RTX, TYPE_MODE (sizetype),
                                  EXPAND_NORMAL);
        }

      parm_align = arg->locate.boundary;

      /* When an argument is padded down, the block is aligned to
         PARM_BOUNDARY, but the actual argument isn't.  */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
        {
          if (arg->locate.size.var)
            parm_align = BITS_PER_UNIT;
          else if (excess)
            {
              unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, excess_align);
            }
        }

      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
        {
          /* emit_push_insn might not work properly if arg->value and
             argblock + arg->locate.offset areas overlap.  */
          rtx x = arg->value;
          int i = 0;

          if (XEXP (x, 0) == crtl->args.internal_arg_pointer
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) ==
                     crtl->args.internal_arg_pointer
                  && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
            {
              if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
                i = INTVAL (XEXP (XEXP (x, 0), 1));

              /* expand_call should ensure this.  */
              gcc_assert (!arg->locate.offset.var
                          && arg->locate.size.var == 0
                          && CONST_INT_P (size_rtx));

              if (arg->locate.offset.constant > i)
                {
                  if (arg->locate.offset.constant < i + INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
              else if (arg->locate.offset.constant < i)
                {
                  /* Use arg->locate.size.constant instead of size_rtx
                     because we only care about the part of the argument
                     on the stack.  */
                  if (i < (arg->locate.offset.constant
                           + arg->locate.size.constant))
                    sibcall_failure = 1;
                }
              else
                {
                  /* Even though they appear to be at the same location,
                     if part of the outgoing argument is in registers,
                     they aren't really at the same location.  Check for
                     this by making sure that the incoming size is the
                     same as the outgoing size.  */
                  if (arg->locate.size.constant != INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
            }
        }
      

      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
                      parm_align, partial, reg, excess, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.

         ??? Unlike the case above, in which we want the actual
         address of the data, so that we can load it directly into a
         register, here we want the address of the stack slot, so that
         it's properly aligned for word-by-word copying or something
         like that.  It's not clear that this is always correct.  */
      if (partial == 0)
        arg->value = arg->stack_slot;
    }

  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
        = emit_group_load_into_temps (arg->reg, arg->value, type,
                                      int_size_in_bytes (type));
    }

  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();

  return sibcall_failure;
}
      

/* Nonzero if we do not know how to pass TYPE solely in registers.  */

bool
must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
                             const_tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  return false;
}

/* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   takes trailing padding of a structure into account.  */
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */

bool
must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  /* If the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register.  */
  if (mode == BLKmode
      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (FUNCTION_ARG_PADDING (mode, type)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;

  return false;
}
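
/* Illustrative example of the padding check above: a 6-byte BLKmode
   structure with a 32-bit PARM_BOUNDARY has 6 % 4 != 0, so on a
   big-endian target that pads such a value upward (as the default
   padding rule does for sizes of at least PARM_BOUNDARY), it must be
   passed on the stack rather than copied into a register.  */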