/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);
static void emit_stack_probe (rtx);


/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  int width = GET_MODE_BITSIZE (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }
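  /* For instance, with an 8-bit mode and C = 0x1ff: the mask
     (sign << 1) - 1 == 0xff reduces C to 0xff, XORing with the sign
     bit (0x80) gives 0x7f, and subtracting the sign bit yields -1,
     which is 0xff correctly sign-extended to a HOST_WIDE_INT.  */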

  return c;
}

/* Return an rtx for the sum of X and the integer C.  */

rtx
plus_constant (rtx x, HOST_WIDE_INT c)
{
  RTX_CODE code;
  rtx y;
  enum machine_mode mode;
  rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
        unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
        unsigned HOST_WIDE_INT l2 = c;
        HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
        unsigned HOST_WIDE_INT lv;
        HOST_WIDE_INT hv;

        add_double (l1, h1, l2, h2, &lv, &hv);

        return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem
            = force_const_mem (GET_MODE (x),
                               plus_constant (get_pool_constant (XEXP (x, 0)),
                                              c));
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
         Look for constant term in the sum and combine
         with C.  For an integer constant term, we make a combined
         integer.  For a constant term that is not an explicit integer,
         we cannot really combine, but group them together anyway.

         Restart or use a recursive call in case the remaining operand is
         something that we handle specially, such as a SYMBOL_REF.

         We may not immediately return from the recursive call here, lest
         all_constant gets lost.  */

      if (CONST_INT_P (XEXP (x, 1)))
        {
          c += INTVAL (XEXP (x, 1));

          if (GET_MODE (x) != VOIDmode)
            c = trunc_int_for_mode (c, GET_MODE (x));

          x = XEXP (x, 0);
          goto restart;
        }
      else if (CONSTANT_P (XEXP (x, 1)))
        {
          x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
          c = 0;
        }
      else if (find_constant_term_loc (&y))
        {
          /* We need to be careful since X may be shared and we can't
             modify it in place.  */
          rtx copy = copy_rtx (x);
          rtx *const_loc = find_constant_term_loc (&copy);

          *const_loc = plus_constant (*const_loc, c);
          x = copy;
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
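
/* For example, plus_constant (stack_pointer_rtx, 8) falls through to the
   default case and yields (plus (reg sp) (const_int 8)) in Pmode, while
   applying it to (const_int 4) simply returns (const_int 12) via the
   CONST_INT case above.  */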

/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
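
/* For instance, starting with *CONSTPTR == const0_rtx, calling this on
   (plus (reg) (const_int 4)) returns (reg) and leaves (const_int 4) in
   *CONSTPTR; a sum with no constant term is returned unchanged.  */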

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !host_integerp (size, 0))
    return -1;

  return tree_low_cst (size, 0);
}

/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}

/* Given X, a memory address in address space AS's pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */

rtx
convert_memory_address_addr_space (enum machine_mode to_mode ATTRIBUTE_UNUSED,
                                   rtx x, addr_space_t as ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
        code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
        break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
        code = ZERO_EXTEND;
      else
        code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
        return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
                            convert_memory_address_addr_space
                              (to_mode, XEXP (x, 0), as));
      break;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
         operation if one operand is a constant and converting the constant
         does not change it or if one operand is a constant and we are
         using a ptr_extend instruction  (POINTERS_EXTEND_UNSIGNED < 0).
         We can always safely permute them if we are making the address
         narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && CONST_INT_P (XEXP (x, 1))
              && (XEXP (x, 1) == convert_memory_address_addr_space
                                   (to_mode, XEXP (x, 1), as)
                 || POINTERS_EXTEND_UNSIGNED < 0)))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address_addr_space
                                 (to_mode, XEXP (x, 0), as),
                               XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}
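
/* As an illustration: on a target whose address mode is wider than its
   pointer mode, converting a SYMBOL_REF just rewrites the mode on a shallow
   copy (the SYMBOL_REF case above), whereas a REG falls through to
   convert_modes and gets an explicit extension or truncation.  */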

/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */

rtx
memory_address_addr_space (enum machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
        goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
        {
          x = oldx;
          goto done;
        }

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      {
        rtx orig_x = x;
        x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
        if (orig_x != x && memory_address_addr_space_p (mode, x, as))
          goto done;
      }

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_addr_space_p (mode, y, as))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_addr_space_p (mode, y, as))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
                                   MEM_ADDR_SPACE (ref)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
                             SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  if (!cse_not_expected)
    base = force_reg (GET_MODE (base), base);

  return replace_equiv_address (x, plus_constant (base, offset));
}
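
/* For example, a reference to SYM + 4, where SYM sits 20 bytes into its
   object block, is first expressed as block offset 24; the chosen anchor's
   own block offset is then subtracted again, so the final address is
   ANCHOR plus (24 - anchor offset).  */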

/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (enum machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (enum machine_mode mode, rtx x)
{
  rtx temp, insn, set;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
        insn = get_last_insn ();
      else
        {
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);
          temp = temp2;
        }
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
          align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
        rtx s = XEXP (XEXP (x, 0), 0);
        rtx c = XEXP (XEXP (x, 0), 1);
        unsigned sa, ca;

        sa = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
          sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

        ca = exact_log2 (INTVAL (c) & -INTVAL (c)) * BITS_PER_UNIT;

        align = MIN (sa, ca);
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}
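
/* Note the alignment bookkeeping above: INTVAL (c) & -INTVAL (c) isolates
   the lowest set bit of the offset, so for SYM + 12 we get 12 & -12 == 4
   and exact_log2 gives 2, which is then scaled by BITS_PER_UNIT and
   capped by the symbol's own declared alignment via MIN.  */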

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}

/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

enum machine_mode
promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp,
                       const_tree funtype, int for_return)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
    case POINTER_TYPE:   case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
                                                  for_return);

    default:
      return mode;
    }
}
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

enum machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode,
              int *punsignedp ATTRIBUTE_UNUSED)
{
  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  const enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      *punsignedp = unsignedp;
      return mode;
      break;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
               (TYPE_ADDR_SPACE (TREE_TYPE (type)));
      break;
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}


/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

enum machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode mode = DECL_MODE (decl);
  enum machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL
      || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}


/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta -= INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       add_optab,
#else
                       sub_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta += INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       sub_optab,
#else
                       add_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}
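
/* For example, anti_adjust_stack (GEN_INT (16)) reserves 16 bytes: on a
   STACK_GROWS_DOWNWARD target it emits sp = sp - 16 and records the change
   in stack_pointer_delta.  */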

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (align == 1)
    return size;

  if (CONST_INT_P (size))
    {
      HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

      if (INTVAL (size) != new_size)
        size = GEN_INT (new_size);
    }
  else
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
      size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
                            NULL_RTX, 1);
      size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
    }

  return size;
}
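
/* E.g. with a 128-bit PREFERRED_STACK_BOUNDARY, align is 16 and a constant
   request of 20 bytes is rounded to (20 + 15) / 16 * 16 == 32; the
   non-constant branch emits the same add/divide/multiply sequence to be
   evaluated at run time.  */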

/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */

void
emit_stack_save (enum save_level save_level, rtx *psave, rtx after)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      do_pending_stack_adjust ();
      /* We must validize inside the sequence, to ensure that any instructions
         created by the validize call also get moved to the right place.  */
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      do_pending_stack_adjust ();
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.

   Put any emitted insns after insn AFTER, if nonzero, otherwise at
   current position.  */

void
emit_stack_restore (enum save_level save_level, rtx sa, rtx after)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
         references to variable arrays below the code
         that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  if (after)
    {
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area,
                   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save, NULL_RTX);
}

/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.  This region of memory is always aligned to
   a multiple of BIGGEST_ALIGNMENT.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.
   TARGET is a place in which the address can be placed.

   KNOWN_ALIGN is the alignment (in bits) that we know SIZE has.  */

rtx
allocate_dynamic_stack_space (rtx size, rtx target, int known_align)
{
  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
#define MUST_ALIGN 1
#else
#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
#endif

  if (MUST_ALIGN)
    size
      = force_operand (plus_constant (size,
                                      BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
                       NULL_RTX);

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.

     What used to happen is that, since we did not know for sure
     whether setjmp() was invoked until after RTL generation, we
     would use reg notes to store the "optimized" size and fix things
     up later.  These days we know this information before we ever
     start building RTL so the reg notes are unnecessary.  */
  if (!cfun->calls_setjmp)
    {
      int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

      /* ??? Code below assumes that the save area needs maximal
         alignment.  This constraint may be too strong.  */
      gcc_assert (PREFERRED_STACK_BOUNDARY == BIGGEST_ALIGNMENT);

      if (CONST_INT_P (size))
        {
          HOST_WIDE_INT new_size = INTVAL (size) / align * align;

          if (INTVAL (size) != new_size)
            size = GEN_INT (new_size);
        }
      else
        {
          /* Since we know overflow is not possible, we avoid using
             CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead.  */
          size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
                                GEN_INT (align), NULL_RTX, 1);
          size = expand_mult (Pmode, size,
                              GEN_INT (align), NULL_RTX, 1);
        }
    }
  else
    {
      rtx dynamic_offset
        = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
                        stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

      size = expand_binop (Pmode, add_optab, size, dynamic_offset,
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */

  /* If we added a variable amount to SIZE,
     we can no longer assume it is aligned.  */
#if !defined (SETJMP_VIA_SAVE_AREA)
  if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
#endif
    size = round_push (size);

  do_pending_stack_adjust ();

 /* We ought always to be called at the top level, and the stack ought to
    be properly aligned.  */
  gcc_assert (!(stack_pointer_delta
                % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (STACK_CHECK_MOVING_SP)
    ;
  else if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
                       size);
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (STACK_CHECK_PROTECT, size);

  /* Don't use a TARGET that isn't a pseudo or is the wrong mode.  */
  if (target == 0 || !REG_P (target)
      || REGNO (target) < FIRST_PSEUDO_REGISTER
      || GET_MODE (target) != Pmode)
    target = gen_reg_rtx (Pmode);

  mark_reg_pointer (target, known_align);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;
      insn_operand_predicate_fn pred;

      /* We don't have to check against the predicate for operand 0 since
         TARGET is known to be a pseudo of the proper mode, which must
         be valid for the operand.  For operand 1, convert to the
         proper mode and validate.  */
      if (mode == VOIDmode)
        mode = insn_data[(int) CODE_FOR_allocate_stack].operand[1].mode;

      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
      if (pred && ! ((*pred) (size, mode)))
        size = copy_to_mode_reg (mode, convert_to_mode (mode, size, 1));

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
        {
          rtx available;
          rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
          available = expand_binop (Pmode, sub_optab,
                                    stack_pointer_rtx, stack_limit_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#else
          available = expand_binop (Pmode, sub_optab,
                                    stack_limit_rtx, stack_pointer_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#endif
          emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
                                   space_available);
#ifdef HAVE_trap
          if (HAVE_trap)
            emit_insn (gen_trap ());
          else
#endif
            error ("stack limits not supported on this target");
          emit_barrier ();
          emit_label (space_available);
        }

      if (flag_stack_check && STACK_CHECK_MOVING_SP)
        anti_adjust_stack_and_probe (size, false);
      else
        anti_adjust_stack (size);

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  if (MUST_ALIGN)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
                             GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                              GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
                              NULL_RTX, 1);
      target = expand_mult (Pmode, target,
                            GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
                            NULL_RTX, 1);
    }

  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();

  return target;
}

/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (rtx libfunc)
{
  stack_check_libfunc = libfunc;
}

/* Emit one stack probe at ADDRESS, an address within the stack.  */

static void
emit_stack_probe (rtx address)
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  /* See if we have an insn to probe the stack.  */
#ifdef HAVE_probe_stack
  if (HAVE_probe_stack)
    emit_insn (gen_probe_stack (memref));
  else
#endif
    emit_move_insn (memref, const0_rtx);
}

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and size is a Pmode RTX.  These are offsets from
   the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
   or subtract them from the stack pointer.  */

#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#define STACK_GROW_OPTAB sub_optab
#define STACK_GROW_OFF(off) -(off)
#else
#define STACK_GROW_OP PLUS
#define STACK_GROW_OPTAB add_optab
#define STACK_GROW_OFF(off) (off)
#endif

void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if we have a function to check the stack.  */
  if (stack_check_libfunc)
    {
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (size, first)));
      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
                         Pmode);
    }

  /* Next see if we have an insn to check the stack.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (size, first)));
      insn_operand_predicate_fn pred
        = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
      if (pred && !((*pred) (addr, Pmode)))
        addr = copy_to_mode_reg (Pmode, addr);

      emit_insn (gen_check_stack (addr));
    }
#endif

  /* Otherwise we have to generate explicit probes.  If we have a constant
     small number of them to generate, that's the easy case.  */
  else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      rtx addr;

      /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
         it exceeds SIZE.  If only one probe is needed, this will not
         generate any code.  Then probe at FIRST + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          addr = memory_address (Pmode,
                                 plus_constant (stack_pointer_rtx,
                                                STACK_GROW_OFF (first + i)));
          emit_stack_probe (addr);
        }

      addr = memory_address (Pmode,
                             plus_constant (stack_pointer_rtx,
                                            STACK_GROW_OFF (first + isize)));
      emit_stack_probe (addr);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* TEST_ADDR = SP + FIRST.  */
      test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 GEN_INT (first)), NULL_RTX);

      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 test_addr,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (TEST_ADDR != LAST_ADDR)
           {
             TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
             probe at TEST_ADDR
           }

         probes at FIRST + N * PROBE_INTERVAL for values of N from 1
         until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
      emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
                               end_lab);

      /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
      temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
                           GEN_INT (PROBE_INTERVAL), test_addr,
                           1, OPTAB_WIDEN);

      gcc_assert (temp == test_addr);

      /* Probe at TEST_ADDR.  */
      emit_stack_probe (test_addr);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
         that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          rtx addr;

          if (GET_CODE (temp) == CONST_INT)
            {
              /* Use [base + disp] addressing mode if supported.  */
              HOST_WIDE_INT offset = INTVAL (temp);
              addr = memory_address (Pmode,
                                     plus_constant (last_addr,
                                                    STACK_GROW_OFF (offset)));
            }
          else
            {
              /* Manual CSE if the difference is not known at compile-time.  */
              temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
              addr = memory_address (Pmode,
                                     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                     last_addr, temp));
            }

          emit_stack_probe (addr);
        }
    }
}
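
/* For instance, with PROBE_INTERVAL == 4096 and a constant SIZE of 10000,
   the constant branch above emits probes at FIRST + 4096, FIRST + 8192 and
   finally FIRST + 10000; only a variable SIZE needs the loop.  */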

/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.  If ADJUST_BACK is true, adjust back the stack pointer
   by plus SIZE at the end.  */

void
anti_adjust_stack_and_probe (rtx size, bool adjust_back)
{
  /* We skip the probe for the first interval + a small dope of 4 words and
     probe that many bytes past the specified size to maintain a protection
     area at the bottom of the stack.  */
  const int dope = 4 * UNITS_PER_WORD;

  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* If we have a constant small number of probes to generate, that's the
     easy case.  */
  if (GET_CODE (size) == CONST_INT && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      bool first_probe = true;

      /* Adjust SP and probe to PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it exceeds SIZE.  If only one probe is
         needed, this will not generate any code.  Then adjust and probe
         to PROBE_INTERVAL + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          if (first_probe)
            {
              anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
              first_probe = false;
            }
          else
            anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
          emit_stack_probe (stack_pointer_rtx);
        }

      if (first_probe)
        anti_adjust_stack (plus_constant (size, PROBE_INTERVAL + dope));
      else
        anti_adjust_stack (plus_constant (size, PROBE_INTERVAL - i));
      emit_stack_probe (stack_pointer_rtx);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* SP = SP_0 + PROBE_INTERVAL.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));

      /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

          while (SP != LAST_ADDR)
            {
              SP = SP + PROBE_INTERVAL
              probe at SP
            }

         adjusts SP and probes to PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if SP == LAST_ADDR.  */
      emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
                               Pmode, 1, end_lab);

      /* SP = SP + PROBE_INTERVAL and probe at SP.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
      emit_stack_probe (stack_pointer_rtx);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: adjust SP and probe to PROBE_INTERVAL + SIZE if we cannot
         assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          /* Manual CSE if the difference is not known at compile-time.  */
          if (GET_CODE (temp) != CONST_INT)
            temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
          anti_adjust_stack (temp);
          emit_stack_probe (stack_pointer_rtx);
        }
    }

  /* Adjust back and account for the additional first interval.  */
  if (adjust_back)
    adjust_stack (plus_constant (size, PROBE_INTERVAL + dope));
  else
    adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
}
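
/* To illustrate with PROBE_INTERVAL == 4096 and a constant SIZE of 10000:
   the code above moves the stack pointer by SIZE + PROBE_INTERVAL + dope
   bytes in probed steps; the final adjust_stack then returns the extra
   PROBE_INTERVAL + dope (or the whole amount when ADJUST_BACK is true),
   leaving a net adjustment of SIZE or zero respectively.  */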

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
                     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
         since the value of bytes will then be large enough that no
         mode will match anyway.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != VOIDmode;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)
            break;
        }

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (enum machine_mode mode, rtx fun)
{
  return targetm.calls.libcall_value (mode, fun);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}

#include "gt-explow.h"