OpenCores
URL: https://opencores.org/ocsvn/openrisc_me/openrisc_me/trunk

Subversion repository openrisc_me: openrisc/trunk/gnu-src/gcc-4.2.2/gcc/explow.c (blame, rev 299)

/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);
static void emit_stack_probe (rtx);
 
/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  int width = GET_MODE_BITSIZE (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}
 
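/* Editor's worked example (not part of the original file): for
   MODE = QImode (8 bits) and C = 0x1FF we get width = 8 and
   sign = 0x80, so

       c &= (sign << 1) - 1;   =>  c = 0xFF
       c ^= sign;              =>  c = 0x7F
       c -= sign;              =>  c = -1

   i.e. trunc_int_for_mode (0x1FF, QImode) == -1, the low byte
   sign-extended back to a full HOST_WIDE_INT.  */
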
/* Return an rtx for the sum of X and the integer C.  */

rtx
plus_constant (rtx x, HOST_WIDE_INT c)
{
  RTX_CODE code;
  rtx y;
  enum machine_mode mode;
  rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
        unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
        unsigned HOST_WIDE_INT l2 = c;
        HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
        unsigned HOST_WIDE_INT lv;
        HOST_WIDE_INT hv;

        add_double (l1, h1, l2, h2, &lv, &hv);

        return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem
            = force_const_mem (GET_MODE (x),
                               plus_constant (get_pool_constant (XEXP (x, 0)),
                                              c));
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
         Look for a constant term in the sum and combine
         it with C.  For an integer constant term, we make a combined
         integer.  For a constant term that is not an explicit integer,
         we cannot really combine, but group them together anyway.

         Restart or use a recursive call in case the remaining operand is
         something that we handle specially, such as a SYMBOL_REF.

         We may not immediately return from the recursive call here, lest
         all_constant get lost.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          c += INTVAL (XEXP (x, 1));

          if (GET_MODE (x) != VOIDmode)
            c = trunc_int_for_mode (c, GET_MODE (x));

          x = XEXP (x, 0);
          goto restart;
        }
      else if (CONSTANT_P (XEXP (x, 1)))
        {
          x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
          c = 0;
        }
      else if (find_constant_term_loc (&y))
        {
          /* We need to be careful since X may be shared and we can't
             modify it in place.  */
          rtx copy = copy_rtx (x);
          rtx *const_loc = find_constant_term_loc (&copy);

          *const_loc = plus_constant (*const_loc, c);
          x = copy;
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
 
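/* Editor's sketch of typical results (not part of the original file;
   the RTL shapes below are illustrative):

       plus_constant ((reg 100), 4)
         => (plus (reg 100) (const_int 4))
       plus_constant ((plus (reg 100) (const_int 8)), 4)
         => (plus (reg 100) (const_int 12))   constants folded
       plus_constant ((symbol_ref "a"), 4)
         => (const (plus (symbol_ref "a") (const_int 4)))
                                              all_constant was set  */
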
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (GET_CODE (XEXP (x, 1)) == CONST_INT
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
 
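/* Editor's example (not part of the original file): with
   X = (plus (plus (reg 1) (const_int 4)) (const_int 8)) and
   *CONSTPTR = const0_rtx on entry, both integer terms are stripped:
   the function returns (reg 1) and leaves *CONSTPTR = (const_int 12).  */
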
/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lang_hooks.expr_size (exp), exp);

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), 0);
}
 
/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    size = lang_hooks.expr_size (exp);

  if (size == 0 || !host_integerp (size, 0))
    return -1;

  return tree_low_cst (size, 0);
}
 
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
    }

  return x;
}
 
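/* Editor's example (not part of the original file): for
   X = (plus (mem (reg 1)) (symbol_ref "a")) this emits loads of the
   MEM and of the SYMBOL_REF into fresh pseudos and returns a PLUS of
   the two new registers, while a plain (reg 1) comes back unchanged.  */
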
/* Given X, a memory address in ptr_mode, convert it to an address
   in Pmode, or vice versa (TO_MODE says which way).  We take advantage of
   the fact that pointers are not allowed to overflow by commuting arithmetic
   operations over conversions so that address arithmetic insns can be
   used.  */

rtx
convert_memory_address (enum machine_mode to_mode ATTRIBUTE_UNUSED,
                        rtx x)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
        code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
        break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
        code = ZERO_EXTEND;
      else
        code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
        return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
                            convert_memory_address (to_mode, XEXP (x, 0)));
      break;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
         operation if one operand is a constant and converting the constant
         does not change it.  We can always safely permute them if we are
         making the address narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && GET_CODE (XEXP (x, 1)) == CONST_INT
              && XEXP (x, 1) == convert_memory_address (to_mode, XEXP (x, 1))))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address (to_mode, XEXP (x, 0)),
                               XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}
 
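/* Editor's illustration under an assumed target (not part of the
   original file): where Pmode is DImode, ptr_mode is SImode and
   POINTERS_EXTEND_UNSIGNED is 1, converting (symbol_ref "x") to
   DImode just rewrites the mode on a shallow copy, a CONST_INT is
   zero-extended at compile time by simplify_unary_operation, and
   anything unhandled falls through to convert_modes.  */
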
/* Return something equivalent to X but valid as a memory address
   for something of mode MODE.  When X is not itself valid, this
   works by copying X or subexpressions of it into registers.  */

rtx
memory_address (enum machine_mode mode, rtx x)
{
  rtx oldx = x;

  x = convert_memory_address (Pmode, x);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (Pmode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_p (mode, x))
        goto win;

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_p (mode, oldx))
        goto win2;

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      LEGITIMIZE_ADDRESS (x, oldx, mode, win);

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_p (mode, y))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_p (mode, y))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (Pmode, x);

      goto done;

    win2:
      x = oldx;
    win:
      if (flag_force_addr && ! cse_not_expected && !REG_P (x))
        {
          x = force_operand (x, NULL_RTX);
          x = force_reg (Pmode, x);
        }
    }

 done:

  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}
 
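/* Editor's example (not part of the original file): suppose
   X = (plus (plus (reg 1) (reg 2)) (const_int 4)) is not itself a
   valid address but base+offset addressing is available.  The PLUS
   case above copies the constant-free sum (plus (reg 1) (reg 2))
   into a fresh register R and returns (plus R (const_int 4)), so the
   shared sum can become a common subexpression.  */
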
/* Like `memory_address' but pretend `flag_force_addr' is 0.  */

rtx
memory_address_noforce (enum machine_mode mode, rtx x)
{
  int ambient_force_addr = flag_force_addr;
  rtx val;

  flag_force_addr = 0;
  val = memory_address (mode, x);
  flag_force_addr = ambient_force_addr;
  return val;
}
 
/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (! (flag_force_addr && CONSTANT_ADDRESS_P (XEXP (ref, 0)))
      && memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}
 
/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (base, 0), 1)) == CONST_INT)
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
                             SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  if (!cse_not_expected)
    base = force_reg (GET_MODE (base), base);

  return replace_equiv_address (x, plus_constant (base, offset));
}
 
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}
 
/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}
 
/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (enum machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}
 
/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why the caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (enum machine_mode mode, rtx x)
{
  rtx temp, insn, set;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
        insn = get_last_insn ();
      else
        {
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);
          temp = temp2;
        }
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know whether TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
          align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
      {
        rtx s = XEXP (XEXP (x, 0), 0);
        rtx c = XEXP (XEXP (x, 0), 1);
        unsigned sa, ca;

        sa = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
          sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

        ca = exact_log2 (INTVAL (c) & -INTVAL (c)) * BITS_PER_UNIT;

        align = MIN (sa, ca);
      }
    else if (MEM_P (x) && MEM_POINTER (x))
      align = MEM_ALIGN (x);

    if (align)
      mark_reg_pointer (temp, align);
  }

  return temp;
}
 
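/* Editor's example (not part of the original file): forcing
   (const (plus (symbol_ref "a") (const_int 4))) into a register,
   where the decl behind "a" has DECL_ALIGN 64, gives sa = 64 and
   ca = exact_log2 (4 & -4) * BITS_PER_UNIT = 16, so the new pseudo
   is marked as a pointer with MIN (64, 16) = 16 bits of known
   alignment.  */
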
/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}
 
/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}
 
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_CALL is nonzero if this call is promoting args for a call.  */

#if defined(PROMOTE_MODE) && !defined(PROMOTE_FUNCTION_MODE)
#define PROMOTE_FUNCTION_MODE PROMOTE_MODE
#endif

enum machine_mode
promote_mode (tree type, enum machine_mode mode, int *punsignedp,
              int for_call ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

#ifndef PROMOTE_MODE
  if (! for_call)
    return mode;
#endif

  switch (code)
    {
#ifdef PROMOTE_FUNCTION_MODE
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:
#ifdef PROMOTE_MODE
      if (for_call)
        {
#endif
          PROMOTE_FUNCTION_MODE (mode, unsignedp, type);
#ifdef PROMOTE_MODE
        }
      else
        {
          PROMOTE_MODE (mode, unsignedp, type);
        }
#endif
      break;
#endif

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      mode = Pmode;
      unsignedp = POINTERS_EXTEND_UNSIGNED;
      break;
#endif

    default:
      break;
    }

  *punsignedp = unsignedp;
  return mode;
}
 
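/* Editor's illustration under an assumed target (not part of the
   original file): with a PROMOTE_MODE definition that widens
   sub-word integer types to SImode, as many RISC ports do, promoting
   a `short' yields mode = SImode with *punsignedp left at 0; under
   POINTERS_EXTEND_UNSIGNED, POINTER_TYPE and REFERENCE_TYPE are
   promoted to Pmode with the extension direction taken from
   POINTERS_EXTEND_UNSIGNED.  */
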
/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable-sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta -= INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       add_optab,
#else
                       sub_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}
 
/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable-sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta += INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       sub_optab,
#else
                       add_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}
 
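/* Editor's note (not part of the original file): the sign conventions
   above make stack_pointer_delta track how far the stack pointer sits
   below its value at function entry.  On a downward-growing stack,
   anti_adjust_stack (GEN_INT (16)) emits sp = sp - 16 and adds 16 to
   stack_pointer_delta; adjust_stack (GEN_INT (16)) undoes both.  */
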
/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (align == 1)
    return size;

  if (GET_CODE (size) == CONST_INT)
    {
      HOST_WIDE_INT new = (INTVAL (size) + align - 1) / align * align;

      if (INTVAL (size) != new)
        size = GEN_INT (new);
    }
  else
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
      size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
                            NULL_RTX, 1);
      size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
    }

  return size;
}
 
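/* Editor's arithmetic example (not part of the original file): with
   PREFERRED_STACK_BOUNDARY = 128, align is 16 bytes and a constant
   SIZE of 25 rounds to (25 + 15) / 16 * 16 = 32; for a variable SIZE
   the same add, TRUNC_DIV_EXPR and multiply steps are emitted as
   insns instead.  */
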
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */

void
emit_stack_save (enum save_level save_level, rtx *psave, rtx after)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      do_pending_stack_adjust ();
      /* We must validize inside the sequence, to ensure that any instructions
         created by the validize call also get moved to the right place.  */
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      do_pending_stack_adjust ();
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}
 
/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.

   Put any emitted insns after insn AFTER, if nonzero, otherwise at
   current position.  */

void
emit_stack_restore (enum save_level save_level, rtx sa, rtx after)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of
     restore.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
         references to variable arrays below the code
         that deletes (pops) the arrays.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
                    gen_rtx_MEM (BLKmode,
                        gen_rtx_SCRATCH (VOIDmode))));
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
                    gen_rtx_MEM (BLKmode, stack_pointer_rtx)));
    }

  discard_pending_stack_adjust ();

  if (after)
    {
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}
 
/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area,
                   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save, NULL_RTX);
}
 
/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.  This region of memory is always aligned to
   a multiple of BIGGEST_ALIGNMENT.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.
   TARGET is a place in which the address can be placed.

   KNOWN_ALIGN is the alignment (in bits) that we know SIZE has.  */

rtx
allocate_dynamic_stack_space (rtx size, rtx target, int known_align)
{
  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  current_function_calls_alloca = 1;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We can't attempt to minimize the alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
#define MUST_ALIGN 1
#else
#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
#endif

  if (MUST_ALIGN)
    size
      = force_operand (plus_constant (size,
                                      BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
                       NULL_RTX);

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.

     What used to happen is that, since we did not know for sure
     whether setjmp() was invoked until after RTL generation, we
     would use reg notes to store the "optimized" size and fix things
     up later.  These days we know this information before we ever
     start building RTL so the reg notes are unnecessary.  */
  if (!current_function_calls_setjmp)
    {
      int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

      /* ??? Code below assumes that the save area needs maximal
         alignment.  This constraint may be too strong.  */
      gcc_assert (PREFERRED_STACK_BOUNDARY == BIGGEST_ALIGNMENT);

      if (GET_CODE (size) == CONST_INT)
        {
          HOST_WIDE_INT new = INTVAL (size) / align * align;

          if (INTVAL (size) != new)
            size = GEN_INT (new);
        }
      else
        {
          /* Since we know overflow is not possible, we avoid using
             CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead.  */
          size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
                                GEN_INT (align), NULL_RTX, 1);
          size = expand_mult (Pmode, size,
                              GEN_INT (align), NULL_RTX, 1);
        }
    }
  else
    {
      rtx dynamic_offset
        = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
                        stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

      size = expand_binop (Pmode, add_optab, size, dynamic_offset,
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */

  /* If we added a variable amount to SIZE,
     we can no longer assume it is aligned.  */
#if !defined (SETJMP_VIA_SAVE_AREA)
  if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
#endif
    size = round_push (size);

  do_pending_stack_adjust ();

  /* We ought always to be called at the top level, and the stack ought to
     be properly aligned.  */
  gcc_assert (!(stack_pointer_delta
                % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);

  /* Don't use a TARGET that isn't a pseudo or is the wrong mode.  */
  if (target == 0 || !REG_P (target)
      || REGNO (target) < FIRST_PSEUDO_REGISTER
      || GET_MODE (target) != Pmode)
    target = gen_reg_rtx (Pmode);

  mark_reg_pointer (target, known_align);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;
      insn_operand_predicate_fn pred;

      /* We don't have to check against the predicate for operand 0 since
         TARGET is known to be a pseudo of the proper mode, which must
         be valid for the operand.  For operand 1, convert to the
         proper mode and validate.  */
      if (mode == VOIDmode)
        mode = insn_data[(int) CODE_FOR_allocate_stack].operand[1].mode;

      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
      if (pred && ! ((*pred) (size, mode)))
        size = copy_to_mode_reg (mode, convert_to_mode (mode, size, 1));

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (current_function_limit_stack)
        {
          rtx available;
          rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
          available = expand_binop (Pmode, sub_optab,
                                    stack_pointer_rtx, stack_limit_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#else
          available = expand_binop (Pmode, sub_optab,
                                    stack_limit_rtx, stack_pointer_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#endif
          emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
                                   space_available);
#ifdef HAVE_trap
          if (HAVE_trap)
            emit_insn (gen_trap ());
          else
#endif
            error ("stack limits not supported on this target");
          emit_barrier ();
          emit_label (space_available);
        }

      anti_adjust_stack (size);

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  if (MUST_ALIGN)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
                             GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                              GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
                              NULL_RTX, 1);
      target = expand_mult (Pmode, target,
                            GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
                            NULL_RTX, 1);
    }

  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();

  return target;
}
 
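/* Editor's usage sketch (not part of the original file): a caller
   such as the __builtin_alloca expander passes the byte count and
   lets this function pick the register, roughly

       rtx addr = allocate_dynamic_stack_space (size_rtx, NULL_RTX,
                                                BITS_PER_UNIT);

   where size_rtx holds the requested size in bytes and no alignment
   better than a byte is known; the returned address is aligned to a
   multiple of BIGGEST_ALIGNMENT.  */
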
/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (rtx libfunc)
{
  stack_check_libfunc = libfunc;
}
 
/* Emit one stack probe at ADDRESS, an address within the stack.  */

static void
emit_stack_probe (rtx address)
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  if (STACK_CHECK_PROBE_LOAD)
    emit_move_insn (gen_reg_rtx (word_mode), memref);
  else
    emit_move_insn (memref, const0_rtx);
}
 
/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from the
   current stack pointer.  STACK_GROWS_DOWNWARD says whether to add or
   subtract from the stack.  If SIZE is constant, this is done
   with a fixed number of probes.  Otherwise, we must make a loop.  */

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#else
#define STACK_GROW_OP PLUS
#endif

void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if the front end has set up a function for us to call to
     check the stack.  */
  if (stack_check_libfunc != 0)
    {
      rtx addr = memory_address (QImode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (size, first)));

      addr = convert_memory_address (ptr_mode, addr);
      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
                         ptr_mode);
    }

  /* Next see if we have an insn to check the stack.  Use it if so.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      insn_operand_predicate_fn pred;
      rtx last_addr
        = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                         stack_pointer_rtx,
                                         plus_constant (size, first)),
                         NULL_RTX);

      pred = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
      if (pred && ! ((*pred) (last_addr, Pmode)))
        last_addr = copy_to_mode_reg (Pmode, last_addr);

      emit_insn (gen_check_stack (last_addr));
    }
#endif

  /* If we have to generate explicit probes, see if we have a constant
     small number of them to generate.  If so, that's the easy case.  */
  else if (GET_CODE (size) == CONST_INT
           && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
    {
      HOST_WIDE_INT offset;

      /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
         for values of N from 1 until it exceeds LAST.  If only one
         probe is needed, this will not generate any code.  Then probe
         at LAST.  */
      for (offset = first + STACK_CHECK_PROBE_INTERVAL;
           offset < INTVAL (size);
           offset = offset + STACK_CHECK_PROBE_INTERVAL)
        emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                          stack_pointer_rtx,
                                          GEN_INT (offset)));

      emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                        stack_pointer_rtx,
                                        plus_constant (size, first)));
    }

  /* In the variable case, do the same as above, but in a loop.  We emit loop
     notes so that loop optimization can be done.  */
  else
    {
      rtx test_addr
        = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                         stack_pointer_rtx,
                                         GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
                         NULL_RTX);
      rtx last_addr
        = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                         stack_pointer_rtx,
                                         plus_constant (size, first)),
                         NULL_RTX);
      rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
      rtx loop_lab = gen_label_rtx ();
      rtx test_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();
      rtx temp;

      if (!REG_P (test_addr)
          || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
        test_addr = force_reg (Pmode, test_addr);

      emit_jump (test_lab);

      emit_label (loop_lab);
      emit_stack_probe (test_addr);

#ifdef STACK_GROWS_DOWNWARD
#define CMP_OPCODE GTU
      temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
                           1, OPTAB_WIDEN);
#else
#define CMP_OPCODE LTU
      temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
                           1, OPTAB_WIDEN);
#endif

      gcc_assert (temp == test_addr);

      emit_label (test_lab);
      emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
                               NULL_RTX, Pmode, 1, loop_lab);
      emit_jump (end_lab);
      emit_label (end_lab);

      emit_stack_probe (last_addr);
    }
}
 
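/* Editor's example (not part of the original file;
   STACK_CHECK_PROBE_INTERVAL is target-configurable, 4096 is assumed
   here): on a downward-growing stack,
   probe_stack_range (4096, GEN_INT (12288)) takes the constant case
   above and emits a probe at sp - 8192 followed by the final probe
   at sp - 16384, i.e. at FIRST + SIZE.  */
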
/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (tree valtype, tree func, tree fntype,
                     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
         since the value of bytes will then be large enough that no
         mode will match anyway.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != VOIDmode;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)
            break;
        }

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}
 
/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (enum machine_mode mode)
{
  return LIBCALL_VALUE (mode);
}
 
/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}
 
#include "gt-explow.h"
