1
/* Convert tree expression to rtl instructions, for GNU compiler.
2
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4
 
5
This file is part of GCC.
6
 
7
GCC is free software; you can redistribute it and/or modify it under
8
the terms of the GNU General Public License as published by the Free
9
Software Foundation; either version 2, or (at your option) any later
10
version.
11
 
12
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13
WARRANTY; without even the implied warranty of MERCHANTABILITY or
14
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15
for more details.
16
 
17
You should have received a copy of the GNU General Public License
18
along with GCC; see the file COPYING.  If not, write to the Free
19
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20
02110-1301, USA.  */
21
 
22
#include "config.h"
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
#include "machmode.h"
27
#include "real.h"
28
#include "rtl.h"
29
#include "tree.h"
30
#include "flags.h"
31
#include "regs.h"
32
#include "hard-reg-set.h"
33
#include "except.h"
34
#include "function.h"
35
#include "insn-config.h"
36
#include "insn-attr.h"
37
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
38
#include "expr.h"
39
#include "optabs.h"
40
#include "libfuncs.h"
41
#include "recog.h"
42
#include "reload.h"
43
#include "output.h"
44
#include "typeclass.h"
45
#include "toplev.h"
46
#include "ggc.h"
47
#include "langhooks.h"
48
#include "intl.h"
49
#include "tm_p.h"
50
#include "tree-iterator.h"
51
#include "tree-pass.h"
52
#include "tree-flow.h"
53
#include "target.h"
54
#include "timevar.h"
55
 
56
/* Decide whether a function's arguments should be processed
57
   from first to last or from last to first.
58
 
59
   They should if the stack and args grow in opposite directions, but
60
   only if we have push insns.  */
61
 
62
#ifdef PUSH_ROUNDING
63
 
64
#ifndef PUSH_ARGS_REVERSED
65
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66
#define PUSH_ARGS_REVERSED      /* If it's last to first.  */
67
#endif
68
#endif
69
 
70
#endif
71
 
72
#ifndef STACK_PUSH_CODE
73
#ifdef STACK_GROWS_DOWNWARD
74
#define STACK_PUSH_CODE PRE_DEC
75
#else
76
#define STACK_PUSH_CODE PRE_INC
77
#endif
78
#endif
79
 
80
 
81
/* If this is nonzero, we do not bother generating VOLATILE
82
   around volatile memory references, and we are willing to
83
   output indirect addresses.  If cse is to follow, we reject
84
   indirect addresses so a useful potential cse is generated;
85
   if it is used only once, instruction combination will produce
86
   the same indirect address eventually.  */
87
int cse_not_expected;
88
 
89
/* This structure is used by move_by_pieces to describe the move to
90
   be performed.  */
91
struct move_by_pieces
92
{
93
  rtx to;
94
  rtx to_addr;
95
  int autinc_to;
96
  int explicit_inc_to;
97
  rtx from;
98
  rtx from_addr;
99
  int autinc_from;
100
  int explicit_inc_from;
101
  unsigned HOST_WIDE_INT len;
102
  HOST_WIDE_INT offset;
103
  int reverse;
104
};
105
 
106
/* This structure is used by store_by_pieces to describe the clear to
107
   be performed.  */
108
 
109
struct store_by_pieces
110
{
111
  rtx to;
112
  rtx to_addr;
113
  int autinc_to;
114
  int explicit_inc_to;
115
  unsigned HOST_WIDE_INT len;
116
  HOST_WIDE_INT offset;
117
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
118
  void *constfundata;
119
  int reverse;
120
};
121
 
122
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
123
                                                     unsigned int,
124
                                                     unsigned int);
125
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126
                              struct move_by_pieces *);
127
static bool block_move_libcall_safe_for_call_parm (void);
128
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
129
static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
130
static tree emit_block_move_libcall_fn (int);
131
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136
                               struct store_by_pieces *);
137
static rtx clear_storage_via_libcall (rtx, rtx, bool);
138
static tree clear_storage_libcall_fn (int);
139
static rtx compress_float_constant (rtx, rtx);
140
static rtx get_subtarget (rtx);
141
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
142
                                     HOST_WIDE_INT, enum machine_mode,
143
                                     tree, tree, int, int);
144
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
145
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
146
                        tree, tree, int);
147
 
148
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
149
 
150
static int is_aligning_offset (tree, tree);
151
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
152
                             enum expand_modifier);
153
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
154
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
155
#ifdef PUSH_ROUNDING
156
static void emit_single_push_insn (enum machine_mode, rtx, tree);
157
#endif
158
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
159
static rtx const_vector_from_tree (tree);
160
static void write_complex_part (rtx, rtx, bool);
161
 
162
/* Record for each mode whether we can move a register directly to or
163
   from an object of that mode in memory.  If we can't, we won't try
164
   to use that mode directly when accessing a field of that mode.  */
165
 
166
static char direct_load[NUM_MACHINE_MODES];
167
static char direct_store[NUM_MACHINE_MODES];
168
 
169
/* Record for each mode whether we can float-extend from memory.  */
170
 
171
static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
172
 
173
/* This macro is used to determine whether move_by_pieces should be called
174
   to perform a structure copy.  */
175
#ifndef MOVE_BY_PIECES_P
176
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
177
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
178
   < (unsigned int) MOVE_RATIO)
179
#endif
180
 
181
/* This macro is used to determine whether clear_by_pieces should be
182
   called to clear storage.  */
183
#ifndef CLEAR_BY_PIECES_P
184
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
185
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
186
   < (unsigned int) CLEAR_RATIO)
187
#endif
188
 
189
/* This macro is used to determine whether store_by_pieces should be
190
   called to "memset" storage with byte values other than zero, or
191
   to "memcpy" storage when the source is a constant string.  */
192
#ifndef STORE_BY_PIECES_P
193
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
194
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
195
   < (unsigned int) MOVE_RATIO)
196
#endif
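/* Editorial note (illustrative, not part of the original source): a caller
   such as emit_block_move below uses these heuristics roughly as

     if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);
     else
       ... fall back to a movmem pattern or a memcpy libcall ...

   i.e. the copy is expanded inline only when move_by_pieces_ninsns
   estimates fewer than MOVE_RATIO (or CLEAR_RATIO) move instructions.  */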
197
 
198
/* This array records the insn_code of insns to perform block moves.  */
199
enum insn_code movmem_optab[NUM_MACHINE_MODES];
200
 
201
/* This array records the insn_code of insns to perform block sets.  */
202
enum insn_code setmem_optab[NUM_MACHINE_MODES];
203
 
204
/* These arrays record the insn_code of three different kinds of insns
205
   to perform block compares.  */
206
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
207
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
208
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
209
 
210
/* Synchronization primitives.  */
211
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
212
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
213
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
214
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
215
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
216
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
217
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
218
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
219
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
220
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
221
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
222
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
223
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
224
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
225
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
226
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
227
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
228
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
229
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
230
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
231
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
232
enum insn_code sync_lock_release[NUM_MACHINE_MODES];
233
 
234
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */
235
 
236
#ifndef SLOW_UNALIGNED_ACCESS
237
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
238
#endif
239
 
240
/* This is run once per compilation to set up which modes can be used
241
   directly in memory and to initialize the block move optab.  */
242
 
243
void
244
init_expr_once (void)
245
{
246
  rtx insn, pat;
247
  enum machine_mode mode;
248
  int num_clobbers;
249
  rtx mem, mem1;
250
  rtx reg;
251
 
252
  /* Try indexing by frame ptr and try by stack ptr.
253
     It is known that on the Convex the stack ptr isn't a valid index.
254
     With luck, one or the other is valid on any machine.  */
255
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
256
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
257
 
258
  /* A scratch register we can modify in-place below to avoid
259
     useless RTL allocations.  */
260
  reg = gen_rtx_REG (VOIDmode, -1);
261
 
262
  insn = rtx_alloc (INSN);
263
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
264
  PATTERN (insn) = pat;
265
 
266
  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
267
       mode = (enum machine_mode) ((int) mode + 1))
268
    {
269
      int regno;
270
 
271
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
272
      PUT_MODE (mem, mode);
273
      PUT_MODE (mem1, mode);
274
      PUT_MODE (reg, mode);
275
 
276
      /* See if there is some register that can be used in this mode and
277
         directly loaded or stored from memory.  */
278
 
279
      if (mode != VOIDmode && mode != BLKmode)
280
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
281
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
282
             regno++)
283
          {
284
            if (! HARD_REGNO_MODE_OK (regno, mode))
285
              continue;
286
 
287
            REGNO (reg) = regno;
288
 
289
            SET_SRC (pat) = mem;
290
            SET_DEST (pat) = reg;
291
            if (recog (pat, insn, &num_clobbers) >= 0)
292
              direct_load[(int) mode] = 1;
293
 
294
            SET_SRC (pat) = mem1;
295
            SET_DEST (pat) = reg;
296
            if (recog (pat, insn, &num_clobbers) >= 0)
297
              direct_load[(int) mode] = 1;
298
 
299
            SET_SRC (pat) = reg;
300
            SET_DEST (pat) = mem;
301
            if (recog (pat, insn, &num_clobbers) >= 0)
302
              direct_store[(int) mode] = 1;
303
 
304
            SET_SRC (pat) = reg;
305
            SET_DEST (pat) = mem1;
306
            if (recog (pat, insn, &num_clobbers) >= 0)
307
              direct_store[(int) mode] = 1;
308
          }
309
    }
310
 
311
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
312
 
313
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
314
       mode = GET_MODE_WIDER_MODE (mode))
315
    {
316
      enum machine_mode srcmode;
317
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
318
           srcmode = GET_MODE_WIDER_MODE (srcmode))
319
        {
320
          enum insn_code ic;
321
 
322
          ic = can_extend_p (mode, srcmode, 0);
323
          if (ic == CODE_FOR_nothing)
324
            continue;
325
 
326
          PUT_MODE (mem, srcmode);
327
 
328
          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
329
            float_extend_from_mem[mode][srcmode] = true;
330
        }
331
    }
332
}
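/* Editorial note (illustrative, not part of the original source): after the
   loop above, direct_load[(int) SFmode], for example, is 1 only if some hard
   register passed HARD_REGNO_MODE_OK and the (set (reg:SF) (mem:SF ...))
   pattern was recognized.  convert_move and convert_modes consult these
   tables before deciding whether a MEM operand must first be forced into a
   register.  */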
333
 
334
/* This is run at the start of compiling a function.  */
335
 
336
void
337
init_expr (void)
338
{
339
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
340
}
341
 
342
/* Copy data from FROM to TO, where the machine modes are not the same.
343
   Both modes may be integer, or both may be floating.
344
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
345
   This causes zero-extension instead of sign-extension.  */
346
 
347
void
348
convert_move (rtx to, rtx from, int unsignedp)
349
{
350
  enum machine_mode to_mode = GET_MODE (to);
351
  enum machine_mode from_mode = GET_MODE (from);
352
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
353
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
354
  enum insn_code code;
355
  rtx libcall;
356
 
357
  /* rtx code for making an equivalent value.  */
358
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
359
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
360
 
361
 
362
  gcc_assert (to_real == from_real);
363
 
364
  /* If the source and destination are already the same, then there's
365
     nothing to do.  */
366
  if (to == from)
367
    return;
368
 
369
  /* If FROM is a SUBREG that indicates that we have already done at least
370
     the required extension, strip it.  We don't handle such SUBREGs as
371
     TO here.  */
372
 
373
  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
374
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
375
          >= GET_MODE_SIZE (to_mode))
376
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
377
    from = gen_lowpart (to_mode, from), from_mode = to_mode;
378
 
379
  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
380
 
381
  if (to_mode == from_mode
382
      || (from_mode == VOIDmode && CONSTANT_P (from)))
383
    {
384
      emit_move_insn (to, from);
385
      return;
386
    }
387
 
388
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
389
    {
390
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
391
 
392
      if (VECTOR_MODE_P (to_mode))
393
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
394
      else
395
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
396
 
397
      emit_move_insn (to, from);
398
      return;
399
    }
400
 
401
  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
402
    {
403
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
404
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
405
      return;
406
    }
407
 
408
  if (to_real)
409
    {
410
      rtx value, insns;
411
      convert_optab tab;
412
 
413
      gcc_assert (GET_MODE_PRECISION (from_mode)
414
                  != GET_MODE_PRECISION (to_mode));
415
 
416
      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
417
        tab = sext_optab;
418
      else
419
        tab = trunc_optab;
420
 
421
      /* Try converting directly if the insn is supported.  */
422
 
423
      code = tab->handlers[to_mode][from_mode].insn_code;
424
      if (code != CODE_FOR_nothing)
425
        {
426
          emit_unop_insn (code, to, from,
427
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
428
          return;
429
        }
430
 
431
      /* Otherwise use a libcall.  */
432
      libcall = tab->handlers[to_mode][from_mode].libfunc;
433
 
434
      /* Is this conversion implemented yet?  */
435
      gcc_assert (libcall);
436
 
437
      start_sequence ();
438
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
439
                                       1, from, from_mode);
440
      insns = get_insns ();
441
      end_sequence ();
442
      emit_libcall_block (insns, to, value,
443
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
444
                                                                       from)
445
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
446
      return;
447
    }
448
 
449
  /* Handle pointer conversion.  */                     /* SPEE 900220.  */
450
  /* Targets are expected to provide conversion insns between PxImode and
451
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
452
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
453
    {
454
      enum machine_mode full_mode
455
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
456
 
457
      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
458
                  != CODE_FOR_nothing);
459
 
460
      if (full_mode != from_mode)
461
        from = convert_to_mode (full_mode, from, unsignedp);
462
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
463
                      to, from, UNKNOWN);
464
      return;
465
    }
466
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
467
    {
468
      rtx new_from;
469
      enum machine_mode full_mode
470
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
471
 
472
      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
473
                  != CODE_FOR_nothing);
474
 
475
      if (to_mode == full_mode)
476
        {
477
          emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
478
                          to, from, UNKNOWN);
479
          return;
480
        }
481
 
482
      new_from = gen_reg_rtx (full_mode);
483
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
484
                      new_from, from, UNKNOWN);
485
 
486
      /* else proceed to integer conversions below.  */
487
      from_mode = full_mode;
488
      from = new_from;
489
    }
490
 
491
  /* Now both modes are integers.  */
492
 
493
  /* Handle expanding beyond a word.  */
494
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
495
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
496
    {
497
      rtx insns;
498
      rtx lowpart;
499
      rtx fill_value;
500
      rtx lowfrom;
501
      int i;
502
      enum machine_mode lowpart_mode;
503
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
504
 
505
      /* Try converting directly if the insn is supported.  */
506
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
507
          != CODE_FOR_nothing)
508
        {
509
          /* If FROM is a SUBREG, put it into a register.  Do this
510
             so that we always generate the same set of insns for
511
             better cse'ing; if an intermediate assignment occurred,
512
             we won't be doing the operation directly on the SUBREG.  */
513
          if (optimize > 0 && GET_CODE (from) == SUBREG)
514
            from = force_reg (from_mode, from);
515
          emit_unop_insn (code, to, from, equiv_code);
516
          return;
517
        }
518
      /* Next, try converting via full word.  */
519
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
520
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
521
                   != CODE_FOR_nothing))
522
        {
523
          if (REG_P (to))
524
            {
525
              if (reg_overlap_mentioned_p (to, from))
526
                from = force_reg (from_mode, from);
527
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
528
            }
529
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
530
          emit_unop_insn (code, to,
531
                          gen_lowpart (word_mode, to), equiv_code);
532
          return;
533
        }
534
 
535
      /* No special multiword conversion insn; do it by hand.  */
536
      start_sequence ();
537
 
538
      /* Since we will turn this into a no conflict block, we must ensure
539
         that the source does not overlap the target.  */
540
 
541
      if (reg_overlap_mentioned_p (to, from))
542
        from = force_reg (from_mode, from);
543
 
544
      /* Get a copy of FROM widened to a word, if necessary.  */
545
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
546
        lowpart_mode = word_mode;
547
      else
548
        lowpart_mode = from_mode;
549
 
550
      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
551
 
552
      lowpart = gen_lowpart (lowpart_mode, to);
553
      emit_move_insn (lowpart, lowfrom);
554
 
555
      /* Compute the value to put in each remaining word.  */
556
      if (unsignedp)
557
        fill_value = const0_rtx;
558
      else
559
        {
560
#ifdef HAVE_slt
561
          if (HAVE_slt
562
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
563
              && STORE_FLAG_VALUE == -1)
564
            {
565
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
566
                             lowpart_mode, 0);
567
              fill_value = gen_reg_rtx (word_mode);
568
              emit_insn (gen_slt (fill_value));
569
            }
570
          else
571
#endif
572
            {
573
              fill_value
574
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
575
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
576
                                NULL_RTX, 0);
577
              fill_value = convert_to_mode (word_mode, fill_value, 1);
578
            }
579
        }
580
 
581
      /* Fill the remaining words.  */
582
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
583
        {
584
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
585
          rtx subword = operand_subword (to, index, 1, to_mode);
586
 
587
          gcc_assert (subword);
588
 
589
          if (fill_value != subword)
590
            emit_move_insn (subword, fill_value);
591
        }
592
 
593
      insns = get_insns ();
594
      end_sequence ();
595
 
596
      emit_no_conflict_block (insns, to, from, NULL_RTX,
597
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
598
      return;
599
    }
600
 
601
  /* Truncating multi-word to a word or less.  */
602
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
603
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
604
    {
605
      if (!((MEM_P (from)
606
             && ! MEM_VOLATILE_P (from)
607
             && direct_load[(int) to_mode]
608
             && ! mode_dependent_address_p (XEXP (from, 0)))
609
            || REG_P (from)
610
            || GET_CODE (from) == SUBREG))
611
        from = force_reg (from_mode, from);
612
      convert_move (to, gen_lowpart (word_mode, from), 0);
613
      return;
614
    }
615
 
616
  /* Now follow all the conversions between integers
617
     no more than a word long.  */
618
 
619
  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
620
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
621
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
622
                                GET_MODE_BITSIZE (from_mode)))
623
    {
624
      if (!((MEM_P (from)
625
             && ! MEM_VOLATILE_P (from)
626
             && direct_load[(int) to_mode]
627
             && ! mode_dependent_address_p (XEXP (from, 0)))
628
            || REG_P (from)
629
            || GET_CODE (from) == SUBREG))
630
        from = force_reg (from_mode, from);
631
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
632
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
633
        from = copy_to_reg (from);
634
      emit_move_insn (to, gen_lowpart (to_mode, from));
635
      return;
636
    }
637
 
638
  /* Handle extension.  */
639
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
640
    {
641
      /* Convert directly if that works.  */
642
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
643
          != CODE_FOR_nothing)
644
        {
645
          emit_unop_insn (code, to, from, equiv_code);
646
          return;
647
        }
648
      else
649
        {
650
          enum machine_mode intermediate;
651
          rtx tmp;
652
          tree shift_amount;
653
 
654
          /* Search for a mode to convert via.  */
655
          for (intermediate = from_mode; intermediate != VOIDmode;
656
               intermediate = GET_MODE_WIDER_MODE (intermediate))
657
            if (((can_extend_p (to_mode, intermediate, unsignedp)
658
                  != CODE_FOR_nothing)
659
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
660
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
661
                                               GET_MODE_BITSIZE (intermediate))))
662
                && (can_extend_p (intermediate, from_mode, unsignedp)
663
                    != CODE_FOR_nothing))
664
              {
665
                convert_move (to, convert_to_mode (intermediate, from,
666
                                                   unsignedp), unsignedp);
667
                return;
668
              }
669
 
670
          /* No suitable intermediate mode.
671
             Generate what we need with shifts.  */
672
          shift_amount = build_int_cst (NULL_TREE,
673
                                        GET_MODE_BITSIZE (to_mode)
674
                                        - GET_MODE_BITSIZE (from_mode));
675
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
676
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
677
                              to, unsignedp);
678
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
679
                              to, unsignedp);
680
          if (tmp != to)
681
            emit_move_insn (to, tmp);
682
          return;
683
        }
684
    }
685
 
686
  /* Support special truncate insns for certain modes.  */
687
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
688
    {
689
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
690
                      to, from, UNKNOWN);
691
      return;
692
    }
693
 
694
  /* Handle truncation of volatile memrefs, and so on;
695
     the things that couldn't be truncated directly,
696
     and for which there was no special instruction.
697
 
698
     ??? Code above formerly short-circuited this, for most integer
699
     mode pairs, with a force_reg in from_mode followed by a recursive
700
     call to this routine.  Appears always to have been wrong.  */
701
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
702
    {
703
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
704
      emit_move_insn (to, temp);
705
      return;
706
    }
707
 
708
  /* Mode combination is not recognized.  */
709
  gcc_unreachable ();
710
}
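/* Editorial usage sketch (illustrative, not part of the original source):
   convert_move stores a mode-converted copy of FROM into an existing TO.
   For example

     rtx si_reg = gen_reg_rtx (SImode);
     convert_move (si_reg, qi_val, 1);

   zero-extends the QImode rtx qi_val into si_reg (UNSIGNEDP == 1), while
   passing 0 would sign-extend.  When no single extension insn exists, the
   code above falls back to an intermediate mode, explicit shifts, or a
   libcall.  */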
711
 
712
/* Return an rtx for a value that would result
713
   from converting X to mode MODE.
714
   Both X and MODE may be floating, or both integer.
715
   UNSIGNEDP is nonzero if X is an unsigned value.
716
   This can be done by referring to a part of X in place
717
   or by copying to a new temporary with conversion.  */
718
 
719
rtx
720
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
721
{
722
  return convert_modes (mode, VOIDmode, x, unsignedp);
723
}
724
 
725
/* Return an rtx for a value that would result
726
   from converting X from mode OLDMODE to mode MODE.
727
   Both modes may be floating, or both integer.
728
   UNSIGNEDP is nonzero if X is an unsigned value.
729
 
730
   This can be done by referring to a part of X in place
731
   or by copying to a new temporary with conversion.
732
 
733
   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
734
 
735
rtx
736
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
737
{
738
  rtx temp;
739
 
740
  /* If FROM is a SUBREG that indicates that we have already done at least
741
     the required extension, strip it.  */
742
 
743
  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
744
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
745
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
746
    x = gen_lowpart (mode, x);
747
 
748
  if (GET_MODE (x) != VOIDmode)
749
    oldmode = GET_MODE (x);
750
 
751
  if (mode == oldmode)
752
    return x;
753
 
754
  /* There is one case that we must handle specially: If we are converting
755
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
756
     we are to interpret the constant as unsigned, gen_lowpart will do
757
     the wrong if the constant appears negative.  What we want to do is
758
     make the high-order word of the constant zero, not all ones.  */
759
 
760
  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
761
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
762
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
763
    {
764
      HOST_WIDE_INT val = INTVAL (x);
765
 
766
      if (oldmode != VOIDmode
767
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
768
        {
769
          int width = GET_MODE_BITSIZE (oldmode);
770
 
771
          /* We need to zero extend VAL.  */
772
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
773
        }
774
 
775
      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
776
    }
777
 
778
  /* We can do this with a gen_lowpart if both desired and current modes
779
     are integer, and this is either a constant integer, a register, or a
780
     non-volatile MEM.  Except for the constant case where MODE is no
781
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */
782
 
783
  if ((GET_CODE (x) == CONST_INT
784
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
785
      || (GET_MODE_CLASS (mode) == MODE_INT
786
          && GET_MODE_CLASS (oldmode) == MODE_INT
787
          && (GET_CODE (x) == CONST_DOUBLE
788
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
789
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
790
                       && direct_load[(int) mode])
791
                      || (REG_P (x)
792
                          && (! HARD_REGISTER_P (x)
793
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
794
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
795
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
796
    {
797
      /* ?? If we don't know OLDMODE, we have to assume here that
798
         X does not need sign- or zero-extension.   This may not be
799
         the case, but it's the best we can do.  */
800
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
801
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
802
        {
803
          HOST_WIDE_INT val = INTVAL (x);
804
          int width = GET_MODE_BITSIZE (oldmode);
805
 
806
          /* We must sign or zero-extend in this case.  Start by
807
             zero-extending, then sign extend if we need to.  */
808
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
809
          if (! unsignedp
810
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
811
            val |= (HOST_WIDE_INT) (-1) << width;
812
 
813
          return gen_int_mode (val, mode);
814
        }
815
 
816
      return gen_lowpart (mode, x);
817
    }
818
 
819
  /* Converting an integer constant into a vector mode is always equivalent to a
820
     subreg operation.  */
821
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
822
    {
823
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
824
      return simplify_gen_subreg (mode, x, oldmode, 0);
825
    }
826
 
827
  temp = gen_reg_rtx (mode);
828
  convert_move (temp, x, unsignedp);
829
  return temp;
830
}
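/* Editorial usage sketch (illustrative, not part of the original source):
   unlike convert_move, convert_to_mode / convert_modes return a value
   instead of storing into a destination, e.g.

     rtx wide = convert_to_mode (SImode, x, 0);

   may hand back X itself (via gen_lowpart), a re-interpreted constant, or a
   fresh pseudo filled in by convert_move, so callers must not assume the
   result is a new register.  */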
831
 
832
/* STORE_MAX_PIECES is the number of bytes at a time that we can
833
   store efficiently.  Due to internal GCC limitations, this is
834
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
835
   for an immediate constant.  */
836
 
837
#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
838
 
839
/* Determine whether the LEN bytes can be moved by using several move
840
   instructions.  Return nonzero if a call to move_by_pieces should
841
   succeed.  */
842
 
843
int
844
can_move_by_pieces (unsigned HOST_WIDE_INT len,
845
                    unsigned int align ATTRIBUTE_UNUSED)
846
{
847
  return MOVE_BY_PIECES_P (len, align);
848
}
849
 
850
/* Generate several move instructions to copy LEN bytes from block FROM to
851
   block TO.  (These are MEM rtx's with BLKmode).
852
 
853
   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
854
   used to push FROM to the stack.
855
 
856
   ALIGN is maximum stack alignment we can assume.
857
 
858
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
859
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
860
   stpcpy.  */
861
 
862
rtx
863
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
864
                unsigned int align, int endp)
865
{
866
  struct move_by_pieces data;
867
  rtx to_addr, from_addr = XEXP (from, 0);
868
  unsigned int max_size = MOVE_MAX_PIECES + 1;
869
  enum machine_mode mode = VOIDmode, tmode;
870
  enum insn_code icode;
871
 
872
  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
873
 
874
  data.offset = 0;
875
  data.from_addr = from_addr;
876
  if (to)
877
    {
878
      to_addr = XEXP (to, 0);
879
      data.to = to;
880
      data.autinc_to
881
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
882
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
883
      data.reverse
884
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
885
    }
886
  else
887
    {
888
      to_addr = NULL_RTX;
889
      data.to = NULL_RTX;
890
      data.autinc_to = 1;
891
#ifdef STACK_GROWS_DOWNWARD
892
      data.reverse = 1;
893
#else
894
      data.reverse = 0;
895
#endif
896
    }
897
  data.to_addr = to_addr;
898
  data.from = from;
899
  data.autinc_from
900
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
901
       || GET_CODE (from_addr) == POST_INC
902
       || GET_CODE (from_addr) == POST_DEC);
903
 
904
  data.explicit_inc_from = 0;
905
  data.explicit_inc_to = 0;
906
  if (data.reverse) data.offset = len;
907
  data.len = len;
908
 
909
  /* If copying requires more than two move insns,
910
     copy addresses to registers (to make displacements shorter)
911
     and use post-increment if available.  */
912
  if (!(data.autinc_from && data.autinc_to)
913
      && move_by_pieces_ninsns (len, align, max_size) > 2)
914
    {
915
      /* Find the mode of the largest move...  */
916
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
917
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
918
        if (GET_MODE_SIZE (tmode) < max_size)
919
          mode = tmode;
920
 
921
      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
922
        {
923
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
924
          data.autinc_from = 1;
925
          data.explicit_inc_from = -1;
926
        }
927
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
928
        {
929
          data.from_addr = copy_addr_to_reg (from_addr);
930
          data.autinc_from = 1;
931
          data.explicit_inc_from = 1;
932
        }
933
      if (!data.autinc_from && CONSTANT_P (from_addr))
934
        data.from_addr = copy_addr_to_reg (from_addr);
935
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
936
        {
937
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
938
          data.autinc_to = 1;
939
          data.explicit_inc_to = -1;
940
        }
941
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
942
        {
943
          data.to_addr = copy_addr_to_reg (to_addr);
944
          data.autinc_to = 1;
945
          data.explicit_inc_to = 1;
946
        }
947
      if (!data.autinc_to && CONSTANT_P (to_addr))
948
        data.to_addr = copy_addr_to_reg (to_addr);
949
    }
950
 
951
  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
952
  if (align >= GET_MODE_ALIGNMENT (tmode))
953
    align = GET_MODE_ALIGNMENT (tmode);
954
  else
955
    {
956
      enum machine_mode xmode;
957
 
958
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
959
           tmode != VOIDmode;
960
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
961
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
962
            || SLOW_UNALIGNED_ACCESS (tmode, align))
963
          break;
964
 
965
      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
966
    }
967
 
968
  /* First move what we can in the largest integer mode, then go to
969
     successively smaller modes.  */
970
 
971
  while (max_size > 1)
972
    {
973
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
974
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
975
        if (GET_MODE_SIZE (tmode) < max_size)
976
          mode = tmode;
977
 
978
      if (mode == VOIDmode)
979
        break;
980
 
981
      icode = mov_optab->handlers[(int) mode].insn_code;
982
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
983
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);
984
 
985
      max_size = GET_MODE_SIZE (mode);
986
    }
987
 
988
  /* The code above should have handled everything.  */
989
  gcc_assert (!data.len);
990
 
991
  if (endp)
992
    {
993
      rtx to1;
994
 
995
      gcc_assert (!data.reverse);
996
      if (data.autinc_to)
997
        {
998
          if (endp == 2)
999
            {
1000
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1001
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1002
              else
1003
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1004
                                                                -1));
1005
            }
1006
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1007
                                           data.offset);
1008
        }
1009
      else
1010
        {
1011
          if (endp == 2)
1012
            --data.offset;
1013
          to1 = adjust_address (data.to, QImode, data.offset);
1014
        }
1015
      return to1;
1016
    }
1017
  else
1018
    return data.to;
1019
}
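/* Editorial usage sketch (illustrative, not part of the original source):
   the ENDP argument selects the return value, mirroring the memcpy /
   mempcpy / stpcpy family:

     move_by_pieces (to, from, len, align, 0)   returns TO
     move_by_pieces (to, from, len, align, 1)   returns memory at TO + LEN
     move_by_pieces (to, from, len, align, 2)   returns memory at TO + LEN - 1

   The nonzero cases are what builtin expanders for mempcpy-style functions
   rely on.  */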
1020
 
1021
/* Return number of insns required to move L bytes by pieces.
1022
   ALIGN (in bits) is maximum alignment we can assume.  */
1023
 
1024
static unsigned HOST_WIDE_INT
1025
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1026
                       unsigned int max_size)
1027
{
1028
  unsigned HOST_WIDE_INT n_insns = 0;
1029
  enum machine_mode tmode;
1030
 
1031
  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1032
  if (align >= GET_MODE_ALIGNMENT (tmode))
1033
    align = GET_MODE_ALIGNMENT (tmode);
1034
  else
1035
    {
1036
      enum machine_mode tmode, xmode;
1037
 
1038
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1039
           tmode != VOIDmode;
1040
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1041
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1042
            || SLOW_UNALIGNED_ACCESS (tmode, align))
1043
          break;
1044
 
1045
      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1046
    }
1047
 
1048
  while (max_size > 1)
1049
    {
1050
      enum machine_mode mode = VOIDmode;
1051
      enum insn_code icode;
1052
 
1053
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1054
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1055
        if (GET_MODE_SIZE (tmode) < max_size)
1056
          mode = tmode;
1057
 
1058
      if (mode == VOIDmode)
1059
        break;
1060
 
1061
      icode = mov_optab->handlers[(int) mode].insn_code;
1062
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1063
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1064
 
1065
      max_size = GET_MODE_SIZE (mode);
1066
    }
1067
 
1068
  gcc_assert (!l);
1069
  return n_insns;
1070
}
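/* Editorial worked example (illustrative, not part of the original source):
   on a target where MOVE_MAX_PIECES is 4 and ALIGN permits SImode accesses,
   move_by_pieces_ninsns (10, align, MOVE_MAX_PIECES + 1) counts two SImode
   moves (8 bytes) plus one HImode move (2 bytes), i.e. 3 insns, which
   MOVE_BY_PIECES_P then compares against MOVE_RATIO.  */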
1071
 
1072
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1073
   with move instructions for mode MODE.  GENFUN is the gen_... function
1074
   to make a move insn for that mode.  DATA has all the other info.  */
1075
 
1076
static void
1077
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1078
                  struct move_by_pieces *data)
1079
{
1080
  unsigned int size = GET_MODE_SIZE (mode);
1081
  rtx to1 = NULL_RTX, from1;
1082
 
1083
  while (data->len >= size)
1084
    {
1085
      if (data->reverse)
1086
        data->offset -= size;
1087
 
1088
      if (data->to)
1089
        {
1090
          if (data->autinc_to)
1091
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1092
                                             data->offset);
1093
          else
1094
            to1 = adjust_address (data->to, mode, data->offset);
1095
        }
1096
 
1097
      if (data->autinc_from)
1098
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1099
                                           data->offset);
1100
      else
1101
        from1 = adjust_address (data->from, mode, data->offset);
1102
 
1103
      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1104
        emit_insn (gen_add2_insn (data->to_addr,
1105
                                  GEN_INT (-(HOST_WIDE_INT)size)));
1106
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1107
        emit_insn (gen_add2_insn (data->from_addr,
1108
                                  GEN_INT (-(HOST_WIDE_INT)size)));
1109
 
1110
      if (data->to)
1111
        emit_insn ((*genfun) (to1, from1));
1112
      else
1113
        {
1114
#ifdef PUSH_ROUNDING
1115
          emit_single_push_insn (mode, from1, NULL);
1116
#else
1117
          gcc_unreachable ();
1118
#endif
1119
        }
1120
 
1121
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1122
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1123
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1124
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1125
 
1126
      if (! data->reverse)
1127
        data->offset += size;
1128
 
1129
      data->len -= size;
1130
    }
1131
}
1132
 
1133
/* Emit code to move a block Y to a block X.  This may be done with
1134
   string-move instructions, with multiple scalar move instructions,
1135
   or with a library call.
1136
 
1137
   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1138
   SIZE is an rtx that says how long they are.
1139
   ALIGN is the maximum alignment we can assume they have.
1140
   METHOD describes what kind of copy this is, and what mechanisms may be used.
1141
 
1142
   Return the address of the new block, if memcpy is called and returns it,
1143
   0 otherwise.  */
1144
 
1145
rtx
1146
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1147
{
1148
  bool may_use_call;
1149
  rtx retval = 0;
1150
  unsigned int align;
1151
 
1152
  switch (method)
1153
    {
1154
    case BLOCK_OP_NORMAL:
1155
    case BLOCK_OP_TAILCALL:
1156
      may_use_call = true;
1157
      break;
1158
 
1159
    case BLOCK_OP_CALL_PARM:
1160
      may_use_call = block_move_libcall_safe_for_call_parm ();
1161
 
1162
      /* Make inhibit_defer_pop nonzero around the library call
1163
         to force it to pop the arguments right away.  */
1164
      NO_DEFER_POP;
1165
      break;
1166
 
1167
    case BLOCK_OP_NO_LIBCALL:
1168
      may_use_call = false;
1169
      break;
1170
 
1171
    default:
1172
      gcc_unreachable ();
1173
    }
1174
 
1175
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1176
 
1177
  gcc_assert (MEM_P (x));
1178
  gcc_assert (MEM_P (y));
1179
  gcc_assert (size);
1180
 
1181
  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1182
     block copy is more efficient for other large modes, e.g. DCmode.  */
1183
  x = adjust_address (x, BLKmode, 0);
1184
  y = adjust_address (y, BLKmode, 0);
1185
 
1186
  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
1187
     can be incorrect is coming from __builtin_memcpy.  */
1188
  if (GET_CODE (size) == CONST_INT)
1189
    {
1190
      if (INTVAL (size) == 0)
1191
        return 0;
1192
 
1193
      x = shallow_copy_rtx (x);
1194
      y = shallow_copy_rtx (y);
1195
      set_mem_size (x, size);
1196
      set_mem_size (y, size);
1197
    }
1198
 
1199
  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1200
    move_by_pieces (x, y, INTVAL (size), align, 0);
1201
  else if (emit_block_move_via_movmem (x, y, size, align))
1202
    ;
1203
  else if (may_use_call)
1204
    retval = emit_block_move_via_libcall (x, y, size,
1205
                                          method == BLOCK_OP_TAILCALL);
1206
  else
1207
    emit_block_move_via_loop (x, y, size, align);
1208
 
1209
  if (method == BLOCK_OP_CALL_PARM)
1210
    OK_DEFER_POP;
1211
 
1212
  return retval;
1213
}
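/* Editorial usage sketch (illustrative, not part of the original source):
   a typical caller copies one BLKmode MEM to another with

     emit_block_move (dest_mem, src_mem, GEN_INT (nbytes), BLOCK_OP_NORMAL);

   BLOCK_OP_CALL_PARM is required while outgoing arguments are being pushed
   (so a memcpy libcall cannot clobber them), and BLOCK_OP_NO_LIBCALL forces
   the by-pieces or loop fallbacks.  */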
1214
 
1215
/* A subroutine of emit_block_move.  Returns true if calling the
1216
   block move libcall will not clobber any parameters which may have
1217
   already been placed on the stack.  */
1218
 
1219
static bool
1220
block_move_libcall_safe_for_call_parm (void)
1221
{
1222
  /* If arguments are pushed on the stack, then they're safe.  */
1223
  if (PUSH_ARGS)
1224
    return true;
1225
 
1226
  /* If registers go on the stack anyway, any argument is sure to clobber
1227
     an outgoing argument.  */
1228
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1229
  {
1230
    tree fn = emit_block_move_libcall_fn (false);
1231
    (void) fn;
1232
    if (REG_PARM_STACK_SPACE (fn) != 0)
1233
      return false;
1234
  }
1235
#endif
1236
 
1237
  /* If any argument goes in memory, then it might clobber an outgoing
1238
     argument.  */
1239
  {
1240
    CUMULATIVE_ARGS args_so_far;
1241
    tree fn, arg;
1242
 
1243
    fn = emit_block_move_libcall_fn (false);
1244
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1245
 
1246
    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1247
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1248
      {
1249
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1250
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1251
        if (!tmp || !REG_P (tmp))
1252
          return false;
1253
        if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1254
          return false;
1255
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1256
      }
1257
  }
1258
  return true;
1259
}
1260
 
1261
/* A subroutine of emit_block_move.  Expand a movmem pattern;
1262
   return true if successful.  */
1263
 
1264
static bool
1265
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1266
{
1267
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1268
  int save_volatile_ok = volatile_ok;
1269
  enum machine_mode mode;
1270
 
1271
  /* Since this is a move insn, we don't care about volatility.  */
1272
  volatile_ok = 1;
1273
 
1274
  /* Try the most limited insn first, because there's no point
1275
     including more than one in the machine description unless
1276
     the more limited one has some advantage.  */
1277
 
1278
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1279
       mode = GET_MODE_WIDER_MODE (mode))
1280
    {
1281
      enum insn_code code = movmem_optab[(int) mode];
1282
      insn_operand_predicate_fn pred;
1283
 
1284
      if (code != CODE_FOR_nothing
1285
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1286
             here because if SIZE is less than the mode mask, as it is
1287
             returned by the macro, it will definitely be less than the
1288
             actual mode mask.  */
1289
          && ((GET_CODE (size) == CONST_INT
1290
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
1291
                   <= (GET_MODE_MASK (mode) >> 1)))
1292
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1293
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1294
              || (*pred) (x, BLKmode))
1295
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1296
              || (*pred) (y, BLKmode))
1297
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1298
              || (*pred) (opalign, VOIDmode)))
1299
        {
1300
          rtx op2;
1301
          rtx last = get_last_insn ();
1302
          rtx pat;
1303
 
1304
          op2 = convert_to_mode (mode, size, 1);
1305
          pred = insn_data[(int) code].operand[2].predicate;
1306
          if (pred != 0 && ! (*pred) (op2, mode))
1307
            op2 = copy_to_mode_reg (mode, op2);
1308
 
1309
          /* ??? When called via emit_block_move_for_call, it'd be
1310
             nice if there were some way to inform the backend, so
1311
             that it doesn't fail the expansion because it thinks
1312
             emitting the libcall would be more efficient.  */
1313
 
1314
          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1315
          if (pat)
1316
            {
1317
              emit_insn (pat);
1318
              volatile_ok = save_volatile_ok;
1319
              return true;
1320
            }
1321
          else
1322
            delete_insns_since (last);
1323
        }
1324
    }
1325
 
1326
  volatile_ok = save_volatile_ok;
1327
  return false;
1328
}
1329
 
1330
/* A subroutine of emit_block_move.  Expand a call to memcpy.
1331
   Return the return value from memcpy, 0 otherwise.  */
1332
 
1333
static rtx
1334
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1335
{
1336
  rtx dst_addr, src_addr;
1337
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1338
  enum machine_mode size_mode;
1339
  rtx retval;
1340
 
1341
  /* Emit code to copy the addresses of DST and SRC and SIZE into new
1342
     pseudos.  We can then place those new pseudos into a VAR_DECL and
1343
     use them later.  */
1344
 
1345
  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1346
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1347
 
1348
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
1349
  src_addr = convert_memory_address (ptr_mode, src_addr);
1350
 
1351
  dst_tree = make_tree (ptr_type_node, dst_addr);
1352
  src_tree = make_tree (ptr_type_node, src_addr);
1353
 
1354
  size_mode = TYPE_MODE (sizetype);
1355
 
1356
  size = convert_to_mode (size_mode, size, 1);
1357
  size = copy_to_mode_reg (size_mode, size);
1358
 
1359
  /* It is incorrect to use the libcall calling conventions to call
1360
     memcpy in this context.  This could be a user call to memcpy and
1361
     the user may wish to examine the return value from memcpy.  For
1362
     targets where libcalls and normal calls have different conventions
1363
     for returning pointers, we could end up generating incorrect code.  */
1364
 
1365
  size_tree = make_tree (sizetype, size);
1366
 
1367
  fn = emit_block_move_libcall_fn (true);
1368
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1369
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1370
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1371
 
1372
  /* Now we have to build up the CALL_EXPR itself.  */
1373
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1374
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1375
                      call_expr, arg_list, NULL_TREE);
1376
  CALL_EXPR_TAILCALL (call_expr) = tailcall;
1377
 
1378
  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1379
 
1380
  return retval;
1381
}
1382
 
1383
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
1384
   for the function we use for block copies.  The first time FOR_CALL
1385
   is true, we call assemble_external.  */
1386
 
1387
static GTY(()) tree block_move_fn;
1388
 
1389
void
1390
init_block_move_fn (const char *asmspec)
1391
{
1392
  if (!block_move_fn)
1393
    {
1394
      tree args, fn;
1395
 
1396
      fn = get_identifier ("memcpy");
1397
      args = build_function_type_list (ptr_type_node, ptr_type_node,
1398
                                       const_ptr_type_node, sizetype,
1399
                                       NULL_TREE);
1400
 
1401
      fn = build_decl (FUNCTION_DECL, fn, args);
1402
      DECL_EXTERNAL (fn) = 1;
1403
      TREE_PUBLIC (fn) = 1;
1404
      DECL_ARTIFICIAL (fn) = 1;
1405
      TREE_NOTHROW (fn) = 1;
1406
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1407
      DECL_VISIBILITY_SPECIFIED (fn) = 1;
1408
 
1409
      block_move_fn = fn;
1410
    }
1411
 
1412
  if (asmspec)
1413
    set_user_assembler_name (block_move_fn, asmspec);
1414
}
1415
 
1416
static tree
1417
emit_block_move_libcall_fn (int for_call)
1418
{
1419
  static bool emitted_extern;
1420
 
1421
  if (!block_move_fn)
1422
    init_block_move_fn (NULL);
1423
 
1424
  if (for_call && !emitted_extern)
1425
    {
1426
      emitted_extern = true;
1427
      make_decl_rtl (block_move_fn);
1428
      assemble_external (block_move_fn);
1429
    }
1430
 
1431
  return block_move_fn;
1432
}
1433
 
1434
/* A subroutine of emit_block_move.  Copy the data via an explicit
1435
   loop.  This is used only when libcalls are forbidden.  */
1436
/* ??? It'd be nice to copy in hunks larger than QImode.  */
1437
 
1438
static void
1439
emit_block_move_via_loop (rtx x, rtx y, rtx size,
1440
                          unsigned int align ATTRIBUTE_UNUSED)
1441
{
1442
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1443
  enum machine_mode iter_mode;
1444
 
1445
  iter_mode = GET_MODE (size);
1446
  if (iter_mode == VOIDmode)
1447
    iter_mode = word_mode;
1448
 
1449
  top_label = gen_label_rtx ();
1450
  cmp_label = gen_label_rtx ();
1451
  iter = gen_reg_rtx (iter_mode);
1452
 
1453
  emit_move_insn (iter, const0_rtx);
1454
 
1455
  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1456
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1457
  do_pending_stack_adjust ();
1458
 
1459
  emit_jump (cmp_label);
1460
  emit_label (top_label);
1461
 
1462
  tmp = convert_modes (Pmode, iter_mode, iter, true);
1463
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1464
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1465
  x = change_address (x, QImode, x_addr);
1466
  y = change_address (y, QImode, y_addr);
1467
 
1468
  emit_move_insn (x, y);
1469
 
1470
  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1471
                             true, OPTAB_LIB_WIDEN);
1472
  if (tmp != iter)
1473
    emit_move_insn (iter, tmp);
1474
 
1475
  emit_label (cmp_label);
1476
 
1477
  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1478
                           true, top_label);
1479
}
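/* Editorial note (illustrative, not part of the original source): the RTL
   emitted above behaves like the following C, one QImode move per
   iteration:

     iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];
     iter = iter + 1;
   cmp:
     if (iter < size) goto top;
 */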
1480
 
1481
/* Copy all or part of a value X into registers starting at REGNO.
1482
   The number of registers to be filled is NREGS.  */
1483
 
1484
void
1485
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1486
{
1487
  int i;
1488
#ifdef HAVE_load_multiple
1489
  rtx pat;
1490
  rtx last;
1491
#endif
1492
 
1493
  if (nregs == 0)
1494
    return;
1495
 
1496
  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1497
    x = validize_mem (force_const_mem (mode, x));
1498
 
1499
  /* See if the machine can do this with a load multiple insn.  */
1500
#ifdef HAVE_load_multiple
1501
  if (HAVE_load_multiple)
1502
    {
1503
      last = get_last_insn ();
1504
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1505
                               GEN_INT (nregs));
1506
      if (pat)
1507
        {
1508
          emit_insn (pat);
1509
          return;
1510
        }
1511
      else
1512
        delete_insns_since (last);
1513
    }
1514
#endif
1515
 
1516
  for (i = 0; i < nregs; i++)
1517
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1518
                    operand_subword_force (x, i, mode));
1519
}
1520
 
1521
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1522
   The number of registers to be filled is NREGS.  */
1523
 
1524
void
1525
move_block_from_reg (int regno, rtx x, int nregs)
1526
{
1527
  int i;
1528
 
1529
  if (nregs == 0)
1530
    return;
1531
 
1532
  /* See if the machine can do this with a store multiple insn.  */
1533
#ifdef HAVE_store_multiple
1534
  if (HAVE_store_multiple)
1535
    {
1536
      rtx last = get_last_insn ();
1537
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1538
                                    GEN_INT (nregs));
1539
      if (pat)
1540
        {
1541
          emit_insn (pat);
1542
          return;
1543
        }
1544
      else
1545
        delete_insns_since (last);
1546
    }
1547
#endif
1548
 
1549
  for (i = 0; i < nregs; i++)
1550
    {
1551
      rtx tem = operand_subword (x, i, 1, BLKmode);
1552
 
1553
      gcc_assert (tem);
1554
 
1555
      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1556
    }
1557
}
1558
 
1559
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1560
   ORIG, where ORIG is a non-consecutive group of registers represented by
1561
   a PARALLEL.  The clone is identical to the original except in that the
1562
   original set of registers is replaced by a new set of pseudo registers.
1563
   The new set has the same modes as the original set.  */
1564
 
1565
rtx
1566
gen_group_rtx (rtx orig)
1567
{
1568
  int i, length;
1569
  rtx *tmps;
1570
 
1571
  gcc_assert (GET_CODE (orig) == PARALLEL);
1572
 
1573
  length = XVECLEN (orig, 0);
1574
  tmps = alloca (sizeof (rtx) * length);
1575
 
1576
  /* Skip a NULL entry in first slot.  */
1577
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1578
 
1579
  if (i)
1580
    tmps[0] = 0;
1581
 
1582
  for (; i < length; i++)
1583
    {
1584
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1585
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1586
 
1587
      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1588
    }
1589
 
1590
  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1591
}
1592
 
1593
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
1594
   except that values are placed in TMPS[i], and must later be moved
1595
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
1596
 
1597
static void
1598
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1599
{
1600
  rtx src;
1601
  int start, i;
1602
  enum machine_mode m = GET_MODE (orig_src);
1603
 
1604
  gcc_assert (GET_CODE (dst) == PARALLEL);
1605
 
1606
  if (m != VOIDmode
1607
      && !SCALAR_INT_MODE_P (m)
1608
      && !MEM_P (orig_src)
1609
      && GET_CODE (orig_src) != CONCAT)
1610
    {
1611
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1612
      if (imode == BLKmode)
1613
        src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1614
      else
1615
        src = gen_reg_rtx (imode);
1616
      if (imode != BLKmode)
1617
        src = gen_lowpart (GET_MODE (orig_src), src);
1618
      emit_move_insn (src, orig_src);
1619
      /* ...and back again.  */
1620
      if (imode != BLKmode)
1621
        src = gen_lowpart (imode, src);
1622
      emit_group_load_1 (tmps, dst, src, type, ssize);
1623
      return;
1624
    }
1625
 
1626
  /* Check for a NULL entry, used to indicate that the parameter goes
1627
     both on the stack and in registers.  */
1628
  if (XEXP (XVECEXP (dst, 0, 0), 0))
1629
    start = 0;
1630
  else
1631
    start = 1;
1632
 
1633
  /* Process the pieces.  */
1634
  for (i = start; i < XVECLEN (dst, 0); i++)
1635
    {
1636
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1637
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1638
      unsigned int bytelen = GET_MODE_SIZE (mode);
1639
      int shift = 0;
1640
 
1641
      /* Handle trailing fragments that run over the size of the struct.  */
1642
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1643
        {
1644
          /* Arrange to shift the fragment to where it belongs.
1645
             extract_bit_field loads to the lsb of the reg.  */
1646
          if (
1647
#ifdef BLOCK_REG_PADDING
1648
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1649
              == (BYTES_BIG_ENDIAN ? upward : downward)
1650
#else
1651
              BYTES_BIG_ENDIAN
1652
#endif
1653
              )
1654
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1655
          bytelen = ssize - bytepos;
1656
          gcc_assert (bytelen > 0);
1657
        }
1658
 
1659
      /* If we won't be loading directly from memory, protect the real source
1660
         from strange tricks we might play; but make sure that the source can
1661
         be loaded directly into the destination.  */
1662
      src = orig_src;
1663
      if (!MEM_P (orig_src)
1664
          && (!CONSTANT_P (orig_src)
1665
              || (GET_MODE (orig_src) != mode
1666
                  && GET_MODE (orig_src) != VOIDmode)))
1667
        {
1668
          if (GET_MODE (orig_src) == VOIDmode)
1669
            src = gen_reg_rtx (mode);
1670
          else
1671
            src = gen_reg_rtx (GET_MODE (orig_src));
1672
 
1673
          emit_move_insn (src, orig_src);
1674
        }
1675
 
1676
      /* Optimize the access just a bit.  */
1677
      if (MEM_P (src)
1678
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1679
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1680
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1681
          && bytelen == GET_MODE_SIZE (mode))
1682
        {
1683
          tmps[i] = gen_reg_rtx (mode);
1684
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1685
        }
1686
      else if (COMPLEX_MODE_P (mode)
1687
               && GET_MODE (src) == mode
1688
               && bytelen == GET_MODE_SIZE (mode))
1689
        /* Let emit_move_complex do the bulk of the work.  */
1690
        tmps[i] = src;
1691
      else if (GET_CODE (src) == CONCAT)
1692
        {
1693
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1694
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1695
 
1696
          if ((bytepos == 0 && bytelen == slen0)
1697
              || (bytepos != 0 && bytepos + bytelen <= slen))
1698
            {
1699
              /* The following assumes that the concatenated objects all
1700
                 have the same size.  In this case, a simple calculation
1701
                 can be used to determine the object and the bit field
1702
                 to be extracted.  */
1703
              tmps[i] = XEXP (src, bytepos / slen0);
1704
              if (! CONSTANT_P (tmps[i])
1705
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1706
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1707
                                             (bytepos % slen0) * BITS_PER_UNIT,
1708
                                             1, NULL_RTX, mode, mode);
1709
            }
1710
          else
1711
            {
1712
              rtx mem;
1713
 
1714
              gcc_assert (!bytepos);
1715
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
1716
              emit_move_insn (mem, src);
1717
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1718
                                           0, 1, NULL_RTX, mode, mode);
1719
            }
1720
        }
1721
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1722
         SIMD register, which is currently broken.  While we get GCC
1723
         to emit proper RTL for these cases, let's dump to memory.  */
1724
      else if (VECTOR_MODE_P (GET_MODE (dst))
1725
               && REG_P (src))
1726
        {
1727
          int slen = GET_MODE_SIZE (GET_MODE (src));
1728
          rtx mem;
1729
 
1730
          mem = assign_stack_temp (GET_MODE (src), slen, 0);
1731
          emit_move_insn (mem, src);
1732
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
1733
        }
1734
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1735
               && XVECLEN (dst, 0) > 1)
1736
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1737
      else if (CONSTANT_P (src)
1738
               || (REG_P (src) && GET_MODE (src) == mode))
1739
        tmps[i] = src;
1740
      else
1741
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1742
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1743
                                     mode, mode);
1744
 
1745
      if (shift)
1746
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1747
                                build_int_cst (NULL_TREE, shift), tmps[i], 0);
1748
    }
1749
}
1750
 
1751
/* Emit code to move a block SRC of type TYPE to a block DST,
1752
   where DST is non-consecutive registers represented by a PARALLEL.
1753
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1754
   if not known.  */
1755
 
1756
void
1757
emit_group_load (rtx dst, rtx src, tree type, int ssize)
1758
{
1759
  rtx *tmps;
1760
  int i;
1761
 
1762
  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1763
  emit_group_load_1 (tmps, dst, src, type, ssize);
1764
 
1765
  /* Copy the extracted pieces into the proper (probable) hard regs.  */
1766
  for (i = 0; i < XVECLEN (dst, 0); i++)
1767
    {
1768
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1769
      if (d == NULL)
1770
        continue;
1771
      emit_move_insn (d, tmps[i]);
1772
    }
1773
}
1774
 
1775
/* Similar, but load SRC into new pseudos in a format that looks like
1776
   PARALLEL.  This can later be fed to emit_group_move to get things
1777
   in the right place.  */
1778
 
1779
rtx
1780
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1781
{
1782
  rtvec vec;
1783
  int i;
1784
 
1785
  vec = rtvec_alloc (XVECLEN (parallel, 0));
1786
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1787
 
1788
  /* Convert the vector to look just like the original PARALLEL, except
1789
     with the computed values.  */
1790
  for (i = 0; i < XVECLEN (parallel, 0); i++)
1791
    {
1792
      rtx e = XVECEXP (parallel, 0, i);
1793
      rtx d = XEXP (e, 0);
1794
 
1795
      if (d)
1796
        {
1797
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1798
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1799
        }
1800
      RTVEC_ELT (vec, i) = e;
1801
    }
1802
 
1803
  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1804
}
1805
 
1806
/* Emit code to move a block SRC to block DST, where SRC and DST are
1807
   non-consecutive groups of registers, each represented by a PARALLEL.  */
1808
 
1809
void
1810
emit_group_move (rtx dst, rtx src)
1811
{
1812
  int i;
1813
 
1814
  gcc_assert (GET_CODE (src) == PARALLEL
1815
              && GET_CODE (dst) == PARALLEL
1816
              && XVECLEN (src, 0) == XVECLEN (dst, 0));
1817
 
1818
  /* Skip first entry if NULL.  */
1819
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1820
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1821
                    XEXP (XVECEXP (src, 0, i), 0));
1822
}
1823
 
1824
/* Move a group of registers represented by a PARALLEL into pseudos.  */
1825
 
1826
rtx
1827
emit_group_move_into_temps (rtx src)
1828
{
1829
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1830
  int i;
1831
 
1832
  for (i = 0; i < XVECLEN (src, 0); i++)
1833
    {
1834
      rtx e = XVECEXP (src, 0, i);
1835
      rtx d = XEXP (e, 0);
1836
 
1837
      if (d)
1838
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1839
      RTVEC_ELT (vec, i) = e;
1840
    }
1841
 
1842
  return gen_rtx_PARALLEL (GET_MODE (src), vec);
1843
}
1844
 
1845
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1846
   where SRC is non-consecutive registers represented by a PARALLEL.
1847
   SSIZE represents the total size of block ORIG_DST, or -1 if not
1848
   known.  */
1849
 
1850
void
1851
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1852
{
1853
  rtx *tmps, dst;
1854
  int start, i;
1855
  enum machine_mode m = GET_MODE (orig_dst);
1856
 
1857
  gcc_assert (GET_CODE (src) == PARALLEL);
1858
 
1859
  if (!SCALAR_INT_MODE_P (m)
1860
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1861
    {
1862
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1863
      if (imode == BLKmode)
1864
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1865
      else
1866
        dst = gen_reg_rtx (imode);
1867
      emit_group_store (dst, src, type, ssize);
1868
      if (imode != BLKmode)
1869
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
1870
      emit_move_insn (orig_dst, dst);
1871
      return;
1872
    }
1873
 
1874
  /* Check for a NULL entry, used to indicate that the parameter goes
1875
     both on the stack and in registers.  */
1876
  if (XEXP (XVECEXP (src, 0, 0), 0))
1877
    start = 0;
1878
  else
1879
    start = 1;
1880
 
1881
  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1882
 
1883
  /* Copy the (probable) hard regs into pseudos.  */
1884
  for (i = start; i < XVECLEN (src, 0); i++)
1885
    {
1886
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1887
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
1888
      emit_move_insn (tmps[i], reg);
1889
    }
1890
 
1891
  /* If we won't be storing directly into memory, protect the real destination
1892
     from strange tricks we might play.  */
1893
  dst = orig_dst;
1894
  if (GET_CODE (dst) == PARALLEL)
1895
    {
1896
      rtx temp;
1897
 
1898
      /* We can get a PARALLEL dst if there is a conditional expression in
1899
         a return statement.  In that case, the dst and src are the same,
1900
         so no action is necessary.  */
1901
      if (rtx_equal_p (dst, src))
1902
        return;
1903
 
1904
      /* It is unclear if we can ever reach here, but we may as well handle
1905
         it.  Allocate a temporary, and split this into a store/load to/from
1906
         the temporary.  */
1907
 
1908
      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1909
      emit_group_store (temp, src, type, ssize);
1910
      emit_group_load (dst, temp, type, ssize);
1911
      return;
1912
    }
1913
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1914
    {
1915
      dst = gen_reg_rtx (GET_MODE (orig_dst));
1916
      /* Make life a bit easier for combine.  */
1917
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
1918
    }
1919
 
1920
  /* Process the pieces.  */
1921
  for (i = start; i < XVECLEN (src, 0); i++)
1922
    {
1923
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1924
      enum machine_mode mode = GET_MODE (tmps[i]);
1925
      unsigned int bytelen = GET_MODE_SIZE (mode);
1926
      rtx dest = dst;
1927
 
1928
      /* Handle trailing fragments that run over the size of the struct.  */
1929
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1930
        {
1931
          /* store_bit_field always takes its value from the lsb.
1932
             Move the fragment to the lsb if it's not already there.  */
1933
          if (
1934
#ifdef BLOCK_REG_PADDING
1935
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1936
              == (BYTES_BIG_ENDIAN ? upward : downward)
1937
#else
1938
              BYTES_BIG_ENDIAN
1939
#endif
1940
              )
1941
            {
1942
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1943
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1944
                                      build_int_cst (NULL_TREE, shift),
1945
                                      tmps[i], 0);
1946
            }
1947
          bytelen = ssize - bytepos;
1948
        }
1949
 
1950
      if (GET_CODE (dst) == CONCAT)
1951
        {
1952
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1953
            dest = XEXP (dst, 0);
1954
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1955
            {
1956
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1957
              dest = XEXP (dst, 1);
1958
            }
1959
          else
1960
            {
1961
              gcc_assert (bytepos == 0 && XVECLEN (src, 0));
1962
              dest = assign_stack_temp (GET_MODE (dest),
1963
                                        GET_MODE_SIZE (GET_MODE (dest)), 0);
1964
              emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1965
                              tmps[i]);
1966
              dst = dest;
1967
              break;
1968
            }
1969
        }
1970
 
1971
      /* Optimize the access just a bit.  */
1972
      if (MEM_P (dest)
1973
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1974
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1975
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1976
          && bytelen == GET_MODE_SIZE (mode))
1977
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1978
      else
1979
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1980
                         mode, tmps[i]);
1981
    }
1982
 
1983
  /* Copy from the pseudo into the (probable) hard reg.  */
1984
  if (orig_dst != dst)
1985
    emit_move_insn (orig_dst, dst);
1986
}
1987
 
1988
/* Generate code to copy a BLKmode object of TYPE out of a
1989
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
1990
   is null, a stack temporary is created.  TGTBLK is returned.
1991
 
1992
   The purpose of this routine is to handle functions that return
1993
   BLKmode structures in registers.  Some machines (the PA for example)
1994
   want to return all small structures in registers regardless of the
1995
   structure's alignment.  */
1996
 
1997
rtx
1998
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
1999
{
2000
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2001
  rtx src = NULL, dst = NULL;
2002
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2003
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2004
 
2005
  if (tgtblk == 0)
2006
    {
2007
      tgtblk = assign_temp (build_qualified_type (type,
2008
                                                  (TYPE_QUALS (type)
2009
                                                   | TYPE_QUAL_CONST)),
2010
                            0, 1, 1);
2011
      preserve_temp_slots (tgtblk);
2012
    }
2013
 
2014
  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2015
     into a new pseudo which is a full word.  */
2016
 
2017
  if (GET_MODE (srcreg) != BLKmode
2018
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2019
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2020
 
2021
  /* If the structure doesn't take up a whole number of words, see whether
2022
     SRCREG is padded on the left or on the right.  If it's on the left,
2023
     set PADDING_CORRECTION to the number of bits to skip.
2024
 
2025
     In most ABIs, the structure will be returned at the least end of
2026
     the register, which translates to right padding on little-endian
2027
     targets and left padding on big-endian targets.  The opposite
2028
     holds if the structure is returned at the most significant
2029
     end of the register.  */
2030
  if (bytes % UNITS_PER_WORD != 0
2031
      && (targetm.calls.return_in_msb (type)
2032
          ? !BYTES_BIG_ENDIAN
2033
          : BYTES_BIG_ENDIAN))
2034
    padding_correction
2035
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2036
 
2037
  /* Copy the structure BITSIZE bites at a time.
2038
 
2039
     We could probably emit more efficient code for machines which do not use
2040
     strict alignment, but it doesn't seem worth the effort at the current
2041
     time.  */
2042
  for (bitpos = 0, xbitpos = padding_correction;
2043
       bitpos < bytes * BITS_PER_UNIT;
2044
       bitpos += bitsize, xbitpos += bitsize)
2045
    {
2046
      /* We need a new source operand each time xbitpos is on a
2047
         word boundary and when xbitpos == padding_correction
2048
         (the first time through).  */
2049
      if (xbitpos % BITS_PER_WORD == 0
2050
          || xbitpos == padding_correction)
2051
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2052
                                     GET_MODE (srcreg));
2053
 
2054
      /* We need a new destination operand each time bitpos is on
2055
         a word boundary.  */
2056
      if (bitpos % BITS_PER_WORD == 0)
2057
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2058
 
2059
      /* Use xbitpos for the source extraction (right justified) and
2060
         xbitpos for the destination store (left justified).  */
2061
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2062
                       extract_bit_field (src, bitsize,
2063
                                          xbitpos % BITS_PER_WORD, 1,
2064
                                          NULL_RTX, word_mode, word_mode));
2065
    }
2066
 
2067
  return tgtblk;
2068
}
2069
 
2070
/* Add a USE expression for REG to the (possibly empty) list pointed
2071
   to by CALL_FUSAGE.  REG must denote a hard register.  */
2072
 
2073
void
2074
use_reg (rtx *call_fusage, rtx reg)
2075
{
2076
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2077
 
2078
  *call_fusage
2079
    = gen_rtx_EXPR_LIST (VOIDmode,
2080
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
2081
}
2082
 
2083
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2084
   starting at REGNO.  All of these registers must be hard registers.  */
2085
 
2086
void
2087
use_regs (rtx *call_fusage, int regno, int nregs)
2088
{
2089
  int i;
2090
 
2091
  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2092
 
2093
  for (i = 0; i < nregs; i++)
2094
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
2095
}
2096
 
2097
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2098
   PARALLEL REGS.  This is for calls that pass values in multiple
2099
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2100
 
2101
void
2102
use_group_regs (rtx *call_fusage, rtx regs)
2103
{
2104
  int i;
2105
 
2106
  for (i = 0; i < XVECLEN (regs, 0); i++)
2107
    {
2108
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2109
 
2110
      /* A NULL entry means the parameter goes both on the stack and in
2111
         registers.  This can also be a MEM for targets that pass values
2112
         partially on the stack and partially in registers.  */
2113
      if (reg != 0 && REG_P (reg))
2114
        use_reg (call_fusage, reg);
2115
    }
2116
}
2117
 
2118
 
2119
/* Determine whether the LEN bytes generated by CONSTFUN can be
2120
   stored to memory using several move instructions.  CONSTFUNDATA is
2121
   a pointer which will be passed as argument in every CONSTFUN call.
2122
   ALIGN is maximum alignment we can assume.  Return nonzero if a
2123
   call to store_by_pieces should succeed.  */
2124
 
2125
int
2126
can_store_by_pieces (unsigned HOST_WIDE_INT len,
2127
                     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2128
                     void *constfundata, unsigned int align)
2129
{
2130
  unsigned HOST_WIDE_INT l;
2131
  unsigned int max_size;
2132
  HOST_WIDE_INT offset = 0;
2133
  enum machine_mode mode, tmode;
2134
  enum insn_code icode;
2135
  int reverse;
2136
  rtx cst;
2137
 
2138
  if (len == 0)
2139
    return 1;
2140
 
2141
  if (! STORE_BY_PIECES_P (len, align))
2142
    return 0;
2143
 
2144
  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2145
  if (align >= GET_MODE_ALIGNMENT (tmode))
2146
    align = GET_MODE_ALIGNMENT (tmode);
2147
  else
2148
    {
2149
      enum machine_mode xmode;
2150
 
2151
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2152
           tmode != VOIDmode;
2153
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2154
        if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2155
            || SLOW_UNALIGNED_ACCESS (tmode, align))
2156
          break;
2157
 
2158
      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2159
    }
2160
 
2161
  /* We would first store what we can in the largest integer mode, then go to
2162
     successively smaller modes.  */
2163
 
2164
  for (reverse = 0;
2165
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2166
       reverse++)
2167
    {
2168
      l = len;
2169
      mode = VOIDmode;
2170
      max_size = STORE_MAX_PIECES + 1;
2171
      while (max_size > 1)
2172
        {
2173
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2174
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2175
            if (GET_MODE_SIZE (tmode) < max_size)
2176
              mode = tmode;
2177
 
2178
          if (mode == VOIDmode)
2179
            break;
2180
 
2181
          icode = mov_optab->handlers[(int) mode].insn_code;
2182
          if (icode != CODE_FOR_nothing
2183
              && align >= GET_MODE_ALIGNMENT (mode))
2184
            {
2185
              unsigned int size = GET_MODE_SIZE (mode);
2186
 
2187
              while (l >= size)
2188
                {
2189
                  if (reverse)
2190
                    offset -= size;
2191
 
2192
                  cst = (*constfun) (constfundata, offset, mode);
2193
                  if (!LEGITIMATE_CONSTANT_P (cst))
2194
                    return 0;
2195
 
2196
                  if (!reverse)
2197
                    offset += size;
2198
 
2199
                  l -= size;
2200
                }
2201
            }
2202
 
2203
          max_size = GET_MODE_SIZE (mode);
2204
        }
2205
 
2206
      /* The code above should have handled everything.  */
2207
      gcc_assert (!l);
2208
    }
2209
 
2210
  return 1;
2211
}
2212
 
2213
/* Generate several move instructions to store LEN bytes generated by
2214
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2215
   pointer which will be passed as argument in every CONSTFUN call.
2216
   ALIGN is maximum alignment we can assume.
2217
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2218
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2219
   stpcpy.  */
2220
 
2221
rtx
2222
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2223
                 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2224
                 void *constfundata, unsigned int align, int endp)
2225
{
2226
  struct store_by_pieces data;
2227
 
2228
  if (len == 0)
2229
    {
2230
      gcc_assert (endp != 2);
2231
      return to;
2232
    }
2233
 
2234
  gcc_assert (STORE_BY_PIECES_P (len, align));
2235
  data.constfun = constfun;
2236
  data.constfundata = constfundata;
2237
  data.len = len;
2238
  data.to = to;
2239
  store_by_pieces_1 (&data, align);
2240
  if (endp)
2241
    {
2242
      rtx to1;
2243
 
2244
      gcc_assert (!data.reverse);
2245
      if (data.autinc_to)
2246
        {
2247
          if (endp == 2)
2248
            {
2249
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2250
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2251
              else
2252
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2253
                                                                -1));
2254
            }
2255
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2256
                                           data.offset);
2257
        }
2258
      else
2259
        {
2260
          if (endp == 2)
2261
            --data.offset;
2262
          to1 = adjust_address (data.to, QImode, data.offset);
2263
        }
2264
      return to1;
2265
    }
2266
  else
2267
    return data.to;
2268
}
2269
 
2270
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2271
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2272
 
2273
static void
2274
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2275
{
2276
  struct store_by_pieces data;
2277
 
2278
  if (len == 0)
2279
    return;
2280
 
2281
  data.constfun = clear_by_pieces_1;
2282
  data.constfundata = NULL;
2283
  data.len = len;
2284
  data.to = to;
2285
  store_by_pieces_1 (&data, align);
2286
}
2287
 
2288
/* Callback routine for clear_by_pieces.
2289
   Return const0_rtx unconditionally.  */
2290
 
2291
static rtx
2292
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2293
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2294
                   enum machine_mode mode ATTRIBUTE_UNUSED)
2295
{
2296
  return const0_rtx;
2297
}
2298
 
2299
/* Subroutine of clear_by_pieces and store_by_pieces.
2300
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
2301
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2302
 
2303
static void
2304
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2305
                   unsigned int align ATTRIBUTE_UNUSED)
2306
{
2307
  rtx to_addr = XEXP (data->to, 0);
2308
  unsigned int max_size = STORE_MAX_PIECES + 1;
2309
  enum machine_mode mode = VOIDmode, tmode;
2310
  enum insn_code icode;
2311
 
2312
  data->offset = 0;
2313
  data->to_addr = to_addr;
2314
  data->autinc_to
2315
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2316
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2317
 
2318
  data->explicit_inc_to = 0;
2319
  data->reverse
2320
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2321
  if (data->reverse)
2322
    data->offset = data->len;
2323
 
2324
  /* If storing requires more than two move insns,
2325
     copy addresses to registers (to make displacements shorter)
2326
     and use post-increment if available.  */
2327
  if (!data->autinc_to
2328
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2329
    {
2330
      /* Determine the main mode we'll be using.  */
2331
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2332
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2333
        if (GET_MODE_SIZE (tmode) < max_size)
2334
          mode = tmode;
2335
 
2336
      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2337
        {
2338
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2339
          data->autinc_to = 1;
2340
          data->explicit_inc_to = -1;
2341
        }
2342
 
2343
      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2344
          && ! data->autinc_to)
2345
        {
2346
          data->to_addr = copy_addr_to_reg (to_addr);
2347
          data->autinc_to = 1;
2348
          data->explicit_inc_to = 1;
2349
        }
2350
 
2351
      if ( !data->autinc_to && CONSTANT_P (to_addr))
2352
        data->to_addr = copy_addr_to_reg (to_addr);
2353
    }
2354
 
2355
  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2356
  if (align >= GET_MODE_ALIGNMENT (tmode))
2357
    align = GET_MODE_ALIGNMENT (tmode);
2358
  else
2359
    {
2360
      enum machine_mode xmode;
2361
 
2362
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2363
           tmode != VOIDmode;
2364
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2365
        if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2366
            || SLOW_UNALIGNED_ACCESS (tmode, align))
2367
          break;
2368
 
2369
      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2370
    }
2371
 
2372
  /* First store what we can in the largest integer mode, then go to
2373
     successively smaller modes.  */
2374
 
2375
  while (max_size > 1)
2376
    {
2377
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2378
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2379
        if (GET_MODE_SIZE (tmode) < max_size)
2380
          mode = tmode;
2381
 
2382
      if (mode == VOIDmode)
2383
        break;
2384
 
2385
      icode = mov_optab->handlers[(int) mode].insn_code;
2386
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2387
        store_by_pieces_2 (GEN_FCN (icode), mode, data);
2388
 
2389
      max_size = GET_MODE_SIZE (mode);
2390
    }
2391
 
2392
  /* The code above should have handled everything.  */
2393
  gcc_assert (!data->len);
2394
}
2395
 
2396
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2397
   with move instructions for mode MODE.  GENFUN is the gen_... function
2398
   to make a move insn for that mode.  DATA has all the other info.  */
2399
 
2400
static void
2401
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2402
                   struct store_by_pieces *data)
2403
{
2404
  unsigned int size = GET_MODE_SIZE (mode);
2405
  rtx to1, cst;
2406
 
2407
  while (data->len >= size)
2408
    {
2409
      if (data->reverse)
2410
        data->offset -= size;
2411
 
2412
      if (data->autinc_to)
2413
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2414
                                         data->offset);
2415
      else
2416
        to1 = adjust_address (data->to, mode, data->offset);
2417
 
2418
      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2419
        emit_insn (gen_add2_insn (data->to_addr,
2420
                                  GEN_INT (-(HOST_WIDE_INT) size)));
2421
 
2422
      cst = (*data->constfun) (data->constfundata, data->offset, mode);
2423
      emit_insn ((*genfun) (to1, cst));
2424
 
2425
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2426
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2427
 
2428
      if (! data->reverse)
2429
        data->offset += size;
2430
 
2431
      data->len -= size;
2432
    }
2433
}
2434
 
2435
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2436
   its length in bytes.  */
2437
 
2438
rtx
2439
clear_storage (rtx object, rtx size, enum block_op_methods method)
2440
{
2441
  enum machine_mode mode = GET_MODE (object);
2442
  unsigned int align;
2443
 
2444
  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2445
 
2446
  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2447
     just move a zero.  Otherwise, do this a piece at a time.  */
2448
  if (mode != BLKmode
2449
      && GET_CODE (size) == CONST_INT
2450
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2451
    {
2452
      rtx zero = CONST0_RTX (mode);
2453
      if (zero != NULL)
2454
        {
2455
          emit_move_insn (object, zero);
2456
          return NULL;
2457
        }
2458
 
2459
      if (COMPLEX_MODE_P (mode))
2460
        {
2461
          zero = CONST0_RTX (GET_MODE_INNER (mode));
2462
          if (zero != NULL)
2463
            {
2464
              write_complex_part (object, zero, 0);
2465
              write_complex_part (object, zero, 1);
2466
              return NULL;
2467
            }
2468
        }
2469
    }
2470
 
2471
  if (size == const0_rtx)
2472
    return NULL;
2473
 
2474
  align = MEM_ALIGN (object);
2475
 
2476
  if (GET_CODE (size) == CONST_INT
2477
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
2478
    clear_by_pieces (object, INTVAL (size), align);
2479
  else if (set_storage_via_setmem (object, size, const0_rtx, align))
2480
    ;
2481
  else
2482
    return clear_storage_via_libcall (object, size,
2483
                                      method == BLOCK_OP_TAILCALL);
2484
 
2485
  return NULL;
2486
}
2487
 
2488
/* A subroutine of clear_storage.  Expand a call to memset.
2489
   Return the return value of memset, 0 otherwise.  */
2490
 
2491
static rtx
2492
clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2493
{
2494
  tree call_expr, arg_list, fn, object_tree, size_tree;
2495
  enum machine_mode size_mode;
2496
  rtx retval;
2497
 
2498
  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
2499
     place those into new pseudos into a VAR_DECL and use them later.  */
2500
 
2501
  object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2502
 
2503
  size_mode = TYPE_MODE (sizetype);
2504
  size = convert_to_mode (size_mode, size, 1);
2505
  size = copy_to_mode_reg (size_mode, size);
2506
 
2507
  /* It is incorrect to use the libcall calling conventions to call
2508
     memset in this context.  This could be a user call to memset and
2509
     the user may wish to examine the return value from memset.  For
2510
     targets where libcalls and normal calls have different conventions
2511
     for returning pointers, we could end up generating incorrect code.  */
2512
 
2513
  object_tree = make_tree (ptr_type_node, object);
2514
  size_tree = make_tree (sizetype, size);
2515
 
2516
  fn = clear_storage_libcall_fn (true);
2517
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2518
  arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2519
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2520
 
2521
  /* Now we have to build up the CALL_EXPR itself.  */
2522
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2523
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2524
                      call_expr, arg_list, NULL_TREE);
2525
  CALL_EXPR_TAILCALL (call_expr) = tailcall;
2526
 
2527
  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2528
 
2529
  return retval;
2530
}
2531
 
2532
/* A subroutine of clear_storage_via_libcall.  Create the tree node
2533
   for the function we use for block clears.  The first time FOR_CALL
2534
   is true, we call assemble_external.  */
2535
 
2536
static GTY(()) tree block_clear_fn;
2537
 
2538
void
2539
init_block_clear_fn (const char *asmspec)
2540
{
2541
  if (!block_clear_fn)
2542
    {
2543
      tree fn, args;
2544
 
2545
      fn = get_identifier ("memset");
2546
      args = build_function_type_list (ptr_type_node, ptr_type_node,
2547
                                       integer_type_node, sizetype,
2548
                                       NULL_TREE);
2549
 
2550
      fn = build_decl (FUNCTION_DECL, fn, args);
2551
      DECL_EXTERNAL (fn) = 1;
2552
      TREE_PUBLIC (fn) = 1;
2553
      DECL_ARTIFICIAL (fn) = 1;
2554
      TREE_NOTHROW (fn) = 1;
2555
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2556
      DECL_VISIBILITY_SPECIFIED (fn) = 1;
2557
 
2558
      block_clear_fn = fn;
2559
    }
2560
 
2561
  if (asmspec)
2562
    set_user_assembler_name (block_clear_fn, asmspec);
2563
}
2564
 
2565
static tree
2566
clear_storage_libcall_fn (int for_call)
2567
{
2568
  static bool emitted_extern;
2569
 
2570
  if (!block_clear_fn)
2571
    init_block_clear_fn (NULL);
2572
 
2573
  if (for_call && !emitted_extern)
2574
    {
2575
      emitted_extern = true;
2576
      make_decl_rtl (block_clear_fn);
2577
      assemble_external (block_clear_fn);
2578
    }
2579
 
2580
  return block_clear_fn;
2581
}
2582
 
2583
/* Expand a setmem pattern; return true if successful.  */
2584
 
2585
bool
2586
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2587
{
2588
  /* Try the most limited insn first, because there's no point
2589
     including more than one in the machine description unless
2590
     the more limited one has some advantage.  */
2591
 
2592
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2593
  enum machine_mode mode;
2594
 
2595
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2596
       mode = GET_MODE_WIDER_MODE (mode))
2597
    {
2598
      enum insn_code code = setmem_optab[(int) mode];
2599
      insn_operand_predicate_fn pred;
2600
 
2601
      if (code != CODE_FOR_nothing
2602
          /* We don't need MODE to be narrower than
2603
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2604
             the mode mask, as it is returned by the macro, it will
2605
             definitely be less than the actual mode mask.  */
2606
          && ((GET_CODE (size) == CONST_INT
2607
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
2608
                   <= (GET_MODE_MASK (mode) >> 1)))
2609
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2610
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2611
              || (*pred) (object, BLKmode))
2612
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2613
              || (*pred) (opalign, VOIDmode)))
2614
        {
2615
          rtx opsize, opchar;
2616
          enum machine_mode char_mode;
2617
          rtx last = get_last_insn ();
2618
          rtx pat;
2619
 
2620
          opsize = convert_to_mode (mode, size, 1);
2621
          pred = insn_data[(int) code].operand[1].predicate;
2622
          if (pred != 0 && ! (*pred) (opsize, mode))
2623
            opsize = copy_to_mode_reg (mode, opsize);
2624
 
2625
          opchar = val;
2626
          char_mode = insn_data[(int) code].operand[2].mode;
2627
          if (char_mode != VOIDmode)
2628
            {
2629
              opchar = convert_to_mode (char_mode, opchar, 1);
2630
              pred = insn_data[(int) code].operand[2].predicate;
2631
              if (pred != 0 && ! (*pred) (opchar, char_mode))
2632
                opchar = copy_to_mode_reg (char_mode, opchar);
2633
            }
2634
 
2635
          pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2636
          if (pat)
2637
            {
2638
              emit_insn (pat);
2639
              return true;
2640
            }
2641
          else
2642
            delete_insns_since (last);
2643
        }
2644
    }
2645
 
2646
  return false;
2647
}
2648
 
2649
 
2650
/* Write to one of the components of the complex value CPLX.  Write VAL to
2651
   the real part if IMAG_P is false, and the imaginary part if its true.  */
2652
 
2653
static void
2654
write_complex_part (rtx cplx, rtx val, bool imag_p)
2655
{
2656
  enum machine_mode cmode;
2657
  enum machine_mode imode;
2658
  unsigned ibitsize;
2659
 
2660
  if (GET_CODE (cplx) == CONCAT)
2661
    {
2662
      emit_move_insn (XEXP (cplx, imag_p), val);
2663
      return;
2664
    }
2665
 
2666
  cmode = GET_MODE (cplx);
2667
  imode = GET_MODE_INNER (cmode);
2668
  ibitsize = GET_MODE_BITSIZE (imode);
2669
 
2670
  /* For MEMs simplify_gen_subreg may generate an invalid new address
2671
     because, e.g., the original address is considered mode-dependent
2672
     by the target, which restricts simplify_subreg from invoking
2673
     adjust_address_nv.  Instead of preparing fallback support for an
2674
     invalid address, we call adjust_address_nv directly.  */
2675
  if (MEM_P (cplx))
2676
    {
2677
      emit_move_insn (adjust_address_nv (cplx, imode,
2678
                                         imag_p ? GET_MODE_SIZE (imode) : 0),
2679
                      val);
2680
      return;
2681
    }
2682
 
2683
  /* If the sub-object is at least word sized, then we know that subregging
2684
     will work.  This special case is important, since store_bit_field
2685
     wants to operate on integer modes, and there's rarely an OImode to
2686
     correspond to TCmode.  */
2687
  if (ibitsize >= BITS_PER_WORD
2688
      /* For hard regs we have exact predicates.  Assume we can split
2689
         the original object if it spans an even number of hard regs.
2690
         This special case is important for SCmode on 64-bit platforms
2691
         where the natural size of floating-point regs is 32-bit.  */
2692
      || (REG_P (cplx)
2693
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2694
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2695
    {
2696
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
2697
                                      imag_p ? GET_MODE_SIZE (imode) : 0);
2698
      if (part)
2699
        {
2700
          emit_move_insn (part, val);
2701
          return;
2702
        }
2703
      else
2704
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
2705
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2706
    }
2707
 
2708
  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2709
}
2710
 
2711
/* Extract one of the components of the complex value CPLX.  Extract the
2712
   real part if IMAG_P is false, and the imaginary part if it's true.  */
2713
 
2714
static rtx
2715
read_complex_part (rtx cplx, bool imag_p)
2716
{
2717
  enum machine_mode cmode, imode;
2718
  unsigned ibitsize;
2719
 
2720
  if (GET_CODE (cplx) == CONCAT)
2721
    return XEXP (cplx, imag_p);
2722
 
2723
  cmode = GET_MODE (cplx);
2724
  imode = GET_MODE_INNER (cmode);
2725
  ibitsize = GET_MODE_BITSIZE (imode);
2726
 
2727
  /* Special case reads from complex constants that got spilled to memory.  */
2728
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2729
    {
2730
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2731
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
2732
        {
2733
          tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2734
          if (CONSTANT_CLASS_P (part))
2735
            return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2736
        }
2737
    }
2738
 
2739
  /* For MEMs simplify_gen_subreg may generate an invalid new address
2740
     because, e.g., the original address is considered mode-dependent
2741
     by the target, which restricts simplify_subreg from invoking
2742
     adjust_address_nv.  Instead of preparing fallback support for an
2743
     invalid address, we call adjust_address_nv directly.  */
2744
  if (MEM_P (cplx))
2745
    return adjust_address_nv (cplx, imode,
2746
                              imag_p ? GET_MODE_SIZE (imode) : 0);
2747
 
2748
  /* If the sub-object is at least word sized, then we know that subregging
2749
     will work.  This special case is important, since extract_bit_field
2750
     wants to operate on integer modes, and there's rarely an OImode to
2751
     correspond to TCmode.  */
2752
  if (ibitsize >= BITS_PER_WORD
2753
      /* For hard regs we have exact predicates.  Assume we can split
2754
         the original object if it spans an even number of hard regs.
2755
         This special case is important for SCmode on 64-bit platforms
2756
         where the natural size of floating-point regs is 32-bit.  */
2757
      || (REG_P (cplx)
2758
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2759
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2760
    {
2761
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2762
                                     imag_p ? GET_MODE_SIZE (imode) : 0);
2763
      if (ret)
2764
        return ret;
2765
      else
2766
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
2767
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2768
    }
2769
 
2770
  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2771
                            true, NULL_RTX, imode, imode);
2772
}
2773
 
2774
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
2775
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
2776
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
2777
   we'll force-create a SUBREG if needed.  */
2778
 
2779
static rtx
2780
emit_move_change_mode (enum machine_mode new_mode,
2781
                       enum machine_mode old_mode, rtx x, bool force)
2782
{
2783
  rtx ret;
2784
 
2785
  if (reload_in_progress && MEM_P (x))
2786
    {
2787
      /* We can't use gen_lowpart here because it may call change_address
2788
         which is not appropriate if we were called when a reload was in
2789
         progress.  We don't have to worry about changing the address since
2790
         the size in bytes is supposed to be the same.  Copy the MEM to
2791
         change the mode and move any substitutions from the old MEM to
2792
         the new one.  */
2793
 
2794
      ret = adjust_address_nv (x, new_mode, 0);
2795
      copy_replacements (x, ret);
2796
    }
2797
  else
2798
    {
2799
      /* Note that we do want simplify_subreg's behavior of validating
2800
         that the new mode is ok for a hard register.  If we were to use
2801
         simplify_gen_subreg, we would create the subreg, but would
2802
         probably run into the target not being able to implement it.  */
2803
      /* Except, of course, when FORCE is true, when this is exactly what
2804
         we want.  Which is needed for CCmodes on some targets.  */
2805
      if (force)
2806
        ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2807
      else
2808
        ret = simplify_subreg (new_mode, x, old_mode, 0);
2809
    }
2810
 
2811
  return ret;
2812
}
2813
 
2814
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
2815
   an integer mode of the same size as MODE.  Returns the instruction
2816
   emitted, or NULL if such a move could not be generated.  */
2817
 
2818
static rtx
2819
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2820
{
2821
  enum machine_mode imode;
2822
  enum insn_code code;
2823
 
2824
  /* There must exist a mode of the exact size we require.  */
2825
  imode = int_mode_for_mode (mode);
2826
  if (imode == BLKmode)
2827
    return NULL_RTX;
2828
 
2829
  /* The target must support moves in this mode.  */
2830
  code = mov_optab->handlers[imode].insn_code;
2831
  if (code == CODE_FOR_nothing)
2832
    return NULL_RTX;
2833
 
2834
  x = emit_move_change_mode (imode, mode, x, force);
2835
  if (x == NULL_RTX)
2836
    return NULL_RTX;
2837
  y = emit_move_change_mode (imode, mode, y, force);
2838
  if (y == NULL_RTX)
2839
    return NULL_RTX;
2840
  return emit_insn (GEN_FCN (code) (x, y));
2841
}
2842
 
2843
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
2844
   Return an equivalent MEM that does not use an auto-increment.  */
2845
 
2846
static rtx
2847
emit_move_resolve_push (enum machine_mode mode, rtx x)
2848
{
2849
  enum rtx_code code = GET_CODE (XEXP (x, 0));
2850
  HOST_WIDE_INT adjust;
2851
  rtx temp;
2852
 
2853
  adjust = GET_MODE_SIZE (mode);
2854
#ifdef PUSH_ROUNDING
2855
  adjust = PUSH_ROUNDING (adjust);
2856
#endif
2857
  if (code == PRE_DEC || code == POST_DEC)
2858
    adjust = -adjust;
2859
  else if (code == PRE_MODIFY || code == POST_MODIFY)
2860
    {
2861
      rtx expr = XEXP (XEXP (x, 0), 1);
2862
      HOST_WIDE_INT val;
2863
 
2864
      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2865
      gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2866
      val = INTVAL (XEXP (expr, 1));
2867
      if (GET_CODE (expr) == MINUS)
2868
        val = -val;
2869
      gcc_assert (adjust == val || adjust == -val);
2870
      adjust = val;
2871
    }
2872
 
2873
  /* Do not use anti_adjust_stack, since we don't want to update
2874
     stack_pointer_delta.  */
2875
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2876
                              GEN_INT (adjust), stack_pointer_rtx,
2877
                              0, OPTAB_LIB_WIDEN);
2878
  if (temp != stack_pointer_rtx)
2879
    emit_move_insn (stack_pointer_rtx, temp);
2880
 
2881
  switch (code)
2882
    {
2883
    case PRE_INC:
2884
    case PRE_DEC:
2885
    case PRE_MODIFY:
2886
      temp = stack_pointer_rtx;
2887
      break;
2888
    case POST_INC:
2889
    case POST_DEC:
2890
    case POST_MODIFY:
2891
      temp = plus_constant (stack_pointer_rtx, -adjust);
2892
      break;
2893
    default:
2894
      gcc_unreachable ();
2895
    }
2896
 
2897
  return replace_equiv_address (x, temp);
2898
}
2899
 
2900
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
2901
   X is known to satisfy push_operand, and MODE is known to be complex.
2902
   Returns the last instruction emitted.  */
2903
 
2904
static rtx
2905
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2906
{
2907
  enum machine_mode submode = GET_MODE_INNER (mode);
2908
  bool imag_first;
2909
 
2910
#ifdef PUSH_ROUNDING
2911
  unsigned int submodesize = GET_MODE_SIZE (submode);
2912
 
2913
  /* In case we output to the stack, but the size is smaller than the
2914
     machine can push exactly, we need to use move instructions.  */
2915
  if (PUSH_ROUNDING (submodesize) != submodesize)
2916
    {
2917
      x = emit_move_resolve_push (mode, x);
2918
      return emit_move_insn (x, y);
2919
    }
2920
#endif
2921
 
2922
  /* Note that the real part always precedes the imag part in memory
2923
     regardless of machine's endianness.  */
2924
  switch (GET_CODE (XEXP (x, 0)))
2925
    {
2926
    case PRE_DEC:
2927
    case POST_DEC:
2928
      imag_first = true;
2929
      break;
2930
    case PRE_INC:
2931
    case POST_INC:
2932
      imag_first = false;
2933
      break;
2934
    default:
2935
      gcc_unreachable ();
2936
    }
2937
 
2938
  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2939
                  read_complex_part (y, imag_first));
2940
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2941
                         read_complex_part (y, !imag_first));
2942
}
2943
 
2944
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
2945
   MODE is known to be complex.  Returns the last instruction emitted.  */
2946
 
2947
static rtx
2948
emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2949
{
2950
  bool try_int;
2951
 
2952
  /* Need to take special care for pushes, to maintain proper ordering
2953
     of the data, and possibly extra padding.  */
2954
  if (push_operand (x, mode))
2955
    return emit_move_complex_push (mode, x, y);
2956
 
2957
  /* See if we can coerce the target into moving both values at once.  */
2958
 
2959
  /* Move floating point as parts.  */
2960
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
2961
      && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
2962
    try_int = false;
2963
  /* Not possible if the values are inherently not adjacent.  */
2964
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
2965
    try_int = false;
2966
  /* Is possible if both are registers (or subregs of registers).  */
2967
  else if (register_operand (x, mode) && register_operand (y, mode))
2968
    try_int = true;
2969
  /* If one of the operands is a memory, and alignment constraints
2970
     are friendly enough, we may be able to do combined memory operations.
2971
     We do not attempt this if Y is a constant because that combination is
2972
     usually better with the by-parts thing below.  */
2973
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
2974
           && (!STRICT_ALIGNMENT
2975
               || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
2976
    try_int = true;
2977
  else
2978
    try_int = false;
2979
 
2980
  if (try_int)
2981
    {
2982
      rtx ret;
2983
 
2984
      /* For memory to memory moves, optimal behavior can be had with the
2985
         existing block move logic.  */
2986
      if (MEM_P (x) && MEM_P (y))
2987
        {
2988
          emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2989
                           BLOCK_OP_NO_LIBCALL);
2990
          return get_last_insn ();
2991
        }
2992
 
2993
      ret = emit_move_via_integer (mode, x, y, true);
2994
      if (ret)
2995
        return ret;
2996
    }
2997
 
2998
  /* Show the output dies here.  This is necessary for SUBREGs
2999
     of pseudos since we cannot track their lifetimes correctly;
3000
     hard regs shouldn't appear here except as return values.  */
3001
  if (!reload_completed && !reload_in_progress
3002
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3003
    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3004
 
3005
  write_complex_part (x, read_complex_part (y, false), false);
3006
  write_complex_part (x, read_complex_part (y, true), true);
3007
  return get_last_insn ();
3008
}
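
/* Minimal usage sketch (hypothetical, not part of expr.c): copying one
   complex-double pseudo into another.  On a target without a DCmode move
   pattern, emit_move_insn_1 dispatches to emit_move_complex above, which
   either attempts a combined move or falls back to the two
   write_complex_part calls, one per half.  */
static void
example_copy_complex (void)
{
  rtx dst = gen_reg_rtx (DCmode);   /* destination pseudo, complex double */
  rtx src = gen_reg_rtx (DCmode);   /* source pseudo */
  emit_move_insn (dst, src);        /* may expand into two DFmode part moves */
}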
3009
 
3010
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3011
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */
3012
 
3013
static rtx
3014
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3015
{
3016
  rtx ret;
3017
 
3018
  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
3019
  if (mode != CCmode)
3020
    {
3021
      enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3022
      if (code != CODE_FOR_nothing)
3023
        {
3024
          x = emit_move_change_mode (CCmode, mode, x, true);
3025
          y = emit_move_change_mode (CCmode, mode, y, true);
3026
          return emit_insn (GEN_FCN (code) (x, y));
3027
        }
3028
    }
3029
 
3030
  /* Otherwise, find the MODE_INT mode of the same width.  */
3031
  ret = emit_move_via_integer (mode, x, y, false);
3032
  gcc_assert (ret != NULL);
3033
  return ret;
3034
}
3035
 
3036
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3037
   MODE is any multi-word or full-word mode that lacks a move_insn
3038
   pattern.  Note that you will get better code if you define such
3039
   patterns, even if they must turn into multiple assembler instructions.  */
3040
 
3041
static rtx
3042
emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3043
{
3044
  rtx last_insn = 0;
3045
  rtx seq, inner;
3046
  bool need_clobber;
3047
  int i;
3048
 
3049
  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3050
 
3051
  /* If X is a push on the stack, do the push now and replace
3052
     X with a reference to the stack pointer.  */
3053
  if (push_operand (x, mode))
3054
    x = emit_move_resolve_push (mode, x);
3055
 
3056
  /* If we are in reload, see if either operand is a MEM whose address
3057
     is scheduled for replacement.  */
3058
  if (reload_in_progress && MEM_P (x)
3059
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3060
    x = replace_equiv_address_nv (x, inner);
3061
  if (reload_in_progress && MEM_P (y)
3062
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3063
    y = replace_equiv_address_nv (y, inner);
3064
 
3065
  start_sequence ();
3066
 
3067
  need_clobber = false;
3068
  for (i = 0;
3069
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3070
       i++)
3071
    {
3072
      rtx xpart = operand_subword (x, i, 1, mode);
3073
      rtx ypart = operand_subword (y, i, 1, mode);
3074
 
3075
      /* If we can't get a part of Y, put Y into memory if it is a
3076
         constant.  Otherwise, force it into a register.  Then we must
3077
         be able to get a part of Y.  */
3078
      if (ypart == 0 && CONSTANT_P (y))
3079
        {
3080
          y = force_const_mem (mode, y);
3081
          ypart = operand_subword (y, i, 1, mode);
3082
        }
3083
      else if (ypart == 0)
3084
        ypart = operand_subword_force (y, i, mode);
3085
 
3086
      gcc_assert (xpart && ypart);
3087
 
3088
      need_clobber |= (GET_CODE (xpart) == SUBREG);
3089
 
3090
      last_insn = emit_move_insn (xpart, ypart);
3091
    }
3092
 
3093
  seq = get_insns ();
3094
  end_sequence ();
3095
 
3096
  /* Show the output dies here.  This is necessary for SUBREGs
3097
     of pseudos since we cannot track their lifetimes correctly;
3098
     hard regs shouldn't appear here except as return values.
3099
     We never want to emit such a clobber after reload.  */
3100
  if (x != y
3101
      && ! (reload_in_progress || reload_completed)
3102
      && need_clobber != 0)
3103
    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3104
 
3105
  emit_insn (seq);
3106
 
3107
  return last_insn;
3108
}
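
/* Illustrative sketch (an assumption, not part of the original source): on
   a 32-bit target with no movdi pattern, the loop above splits a DImode
   copy into two word-sized moves, roughly:

       emit_move_insn (operand_subword (x, 0, 1, DImode),
                       operand_subword (y, 0, 1, DImode));
       emit_move_insn (operand_subword (x, 1, 1, DImode),
                       operand_subword (y, 1, 1, DImode));                  */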
3109
 
3110
/* Low level part of emit_move_insn.
3111
   Called just like emit_move_insn, but assumes X and Y
3112
   are basically valid.  */
3113
 
3114
rtx
3115
emit_move_insn_1 (rtx x, rtx y)
3116
{
3117
  enum machine_mode mode = GET_MODE (x);
3118
  enum insn_code code;
3119
 
3120
  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3121
 
3122
  code = mov_optab->handlers[mode].insn_code;
3123
  if (code != CODE_FOR_nothing)
3124
    return emit_insn (GEN_FCN (code) (x, y));
3125
 
3126
  /* Expand complex moves by moving real part and imag part.  */
3127
  if (COMPLEX_MODE_P (mode))
3128
    return emit_move_complex (mode, x, y);
3129
 
3130
  if (GET_MODE_CLASS (mode) == MODE_CC)
3131
    return emit_move_ccmode (mode, x, y);
3132
 
3133
  /* Try using a move pattern for the corresponding integer mode.  This is
3134
     only safe when simplify_subreg can convert MODE constants into integer
3135
     constants.  At present, it can only do this reliably if the value
3136
     fits within a HOST_WIDE_INT.  */
3137
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3138
    {
3139
      rtx ret = emit_move_via_integer (mode, x, y, false);
3140
      if (ret)
3141
        return ret;
3142
    }
3143
 
3144
  return emit_move_multi_word (mode, x, y);
3145
}
3146
 
3147
/* Generate code to copy Y into X.
3148
   Both Y and X must have the same mode, except that
3149
   Y can be a constant with VOIDmode.
3150
   This mode cannot be BLKmode; use emit_block_move for that.
3151
 
3152
   Return the last instruction emitted.  */
3153
 
3154
rtx
3155
emit_move_insn (rtx x, rtx y)
3156
{
3157
  enum machine_mode mode = GET_MODE (x);
3158
  rtx y_cst = NULL_RTX;
3159
  rtx last_insn, set;
3160
 
3161
  gcc_assert (mode != BLKmode
3162
              && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3163
 
3164
  if (CONSTANT_P (y))
3165
    {
3166
      if (optimize
3167
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3168
          && (last_insn = compress_float_constant (x, y)))
3169
        return last_insn;
3170
 
3171
      y_cst = y;
3172
 
3173
      if (!LEGITIMATE_CONSTANT_P (y))
3174
        {
3175
          y = force_const_mem (mode, y);
3176
 
3177
          /* If the target's cannot_force_const_mem prevented the spill,
3178
             assume that the target's move expanders will also take care
3179
             of the non-legitimate constant.  */
3180
          if (!y)
3181
            y = y_cst;
3182
        }
3183
    }
3184
 
3185
  /* If X or Y are memory references, verify that their addresses are valid
3186
     for the machine.  */
3187
  if (MEM_P (x)
3188
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3189
           && ! push_operand (x, GET_MODE (x)))
3190
          || (flag_force_addr
3191
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3192
    x = validize_mem (x);
3193
 
3194
  if (MEM_P (y)
3195
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3196
          || (flag_force_addr
3197
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3198
    y = validize_mem (y);
3199
 
3200
  gcc_assert (mode != BLKmode);
3201
 
3202
  last_insn = emit_move_insn_1 (x, y);
3203
 
3204
  if (y_cst && REG_P (x)
3205
      && (set = single_set (last_insn)) != NULL_RTX
3206
      && SET_DEST (set) == x
3207
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
3208
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3209
 
3210
  return last_insn;
3211
}
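
/* Minimal usage sketch (hypothetical, not part of expr.c): load the
   constant 42 into a fresh SImode pseudo.  The function name and the value
   are invented for illustration; gen_reg_rtx and GEN_INT are the usual RTL
   helpers.  */
static rtx
example_load_constant (void)
{
  rtx reg = gen_reg_rtx (SImode);       /* allocate a new pseudo register */
  emit_move_insn (reg, GEN_INT (42));   /* emit (set (reg:SI) (const_int 42)) */
  return reg;
}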
3212
 
3213
/* If Y is representable exactly in a narrower mode, and the target can
3214
   perform the extension directly from constant or memory, then emit the
3215
   move as an extension.  */
3216
 
3217
static rtx
3218
compress_float_constant (rtx x, rtx y)
3219
{
3220
  enum machine_mode dstmode = GET_MODE (x);
3221
  enum machine_mode orig_srcmode = GET_MODE (y);
3222
  enum machine_mode srcmode;
3223
  REAL_VALUE_TYPE r;
3224
  int oldcost, newcost;
3225
 
3226
  REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3227
 
3228
  if (LEGITIMATE_CONSTANT_P (y))
3229
    oldcost = rtx_cost (y, SET);
3230
  else
3231
    oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3232
 
3233
  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3234
       srcmode != orig_srcmode;
3235
       srcmode = GET_MODE_WIDER_MODE (srcmode))
3236
    {
3237
      enum insn_code ic;
3238
      rtx trunc_y, last_insn;
3239
 
3240
      /* Skip if the target can't extend this way.  */
3241
      ic = can_extend_p (dstmode, srcmode, 0);
3242
      if (ic == CODE_FOR_nothing)
3243
        continue;
3244
 
3245
      /* Skip if the narrowed value isn't exact.  */
3246
      if (! exact_real_truncate (srcmode, &r))
3247
        continue;
3248
 
3249
      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3250
 
3251
      if (LEGITIMATE_CONSTANT_P (trunc_y))
3252
        {
3253
          /* Skip if the target needs extra instructions to perform
3254
             the extension.  */
3255
          if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3256
            continue;
3257
          /* This is valid, but may not be cheaper than the original. */
3258
          newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3259
          if (oldcost < newcost)
3260
            continue;
3261
        }
3262
      else if (float_extend_from_mem[dstmode][srcmode])
3263
        {
3264
          trunc_y = force_const_mem (srcmode, trunc_y);
3265
          /* This is valid, but may not be cheaper than the original. */
3266
          newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3267
          if (oldcost < newcost)
3268
            continue;
3269
          trunc_y = validize_mem (trunc_y);
3270
        }
3271
      else
3272
        continue;
3273
 
3274
      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3275
      last_insn = get_last_insn ();
3276
 
3277
      if (REG_P (x))
3278
        set_unique_reg_note (last_insn, REG_EQUAL, y);
3279
 
3280
      return last_insn;
3281
    }
3282
 
3283
  return NULL_RTX;
3284
}
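
/* Illustrative note (an assumption about typical output, not part of the
   original source): for a DFmode load of the constant 1.5, the value is
   exactly representable in SFmode, so on a target that can extend directly
   from memory the move may be emitted as roughly

       (set (reg:DF) (float_extend:DF (mem/u:SF <constant pool entry>)))

   provided the extension is no more expensive than loading the original
   DFmode constant.  */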
3285
 
3286
/* Pushing data onto the stack.  */
3287
 
3288
/* Push a block of length SIZE (perhaps variable)
3289
   and return an rtx to address the beginning of the block.
3290
   The value may be virtual_outgoing_args_rtx.
3291
 
3292
   EXTRA is the number of bytes of padding to push in addition to SIZE.
3293
   BELOW nonzero means this padding comes at low addresses;
3294
   otherwise, the padding comes at high addresses.  */
3295
 
3296
rtx
3297
push_block (rtx size, int extra, int below)
3298
{
3299
  rtx temp;
3300
 
3301
  size = convert_modes (Pmode, ptr_mode, size, 1);
3302
  if (CONSTANT_P (size))
3303
    anti_adjust_stack (plus_constant (size, extra));
3304
  else if (REG_P (size) && extra == 0)
3305
    anti_adjust_stack (size);
3306
  else
3307
    {
3308
      temp = copy_to_mode_reg (Pmode, size);
3309
      if (extra != 0)
3310
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3311
                             temp, 0, OPTAB_LIB_WIDEN);
3312
      anti_adjust_stack (temp);
3313
    }
3314
 
3315
#ifndef STACK_GROWS_DOWNWARD
3316
  if (0)
3317
#else
3318
  if (1)
3319
#endif
3320
    {
3321
      temp = virtual_outgoing_args_rtx;
3322
      if (extra != 0 && below)
3323
        temp = plus_constant (temp, extra);
3324
    }
3325
  else
3326
    {
3327
      if (GET_CODE (size) == CONST_INT)
3328
        temp = plus_constant (virtual_outgoing_args_rtx,
3329
                              -INTVAL (size) - (below ? 0 : extra));
3330
      else if (extra != 0 && !below)
3331
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3332
                             negate_rtx (Pmode, plus_constant (size, extra)));
3333
      else
3334
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3335
                             negate_rtx (Pmode, size));
3336
    }
3337
 
3338
  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3339
}
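
/* Minimal usage sketch (hypothetical, not part of expr.c): reserve 64 bytes
   of stack space and wrap the returned address in a BLKmode MEM, mirroring
   the way emit_push_insn uses push_block further below.  */
static rtx
example_push_64_bytes (void)
{
  rtx addr = push_block (GEN_INT (64), 0, 0);   /* adjust sp, get address */
  return gen_rtx_MEM (BLKmode, addr);           /* MEM for the new block */
}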
3340
 
3341
#ifdef PUSH_ROUNDING
3342
 
3343
/* Emit single push insn.  */
3344
 
3345
static void
3346
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3347
{
3348
  rtx dest_addr;
3349
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3350
  rtx dest;
3351
  enum insn_code icode;
3352
  insn_operand_predicate_fn pred;
3353
 
3354
  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3355
  /* If there is a push pattern, use it.  Otherwise try the old way of
3356
     throwing a MEM representing the push operation to the move expander.  */
3357
  icode = push_optab->handlers[(int) mode].insn_code;
3358
  if (icode != CODE_FOR_nothing)
3359
    {
3360
      if (((pred = insn_data[(int) icode].operand[0].predicate)
3361
           && !((*pred) (x, mode))))
3362
        x = force_reg (mode, x);
3363
      emit_insn (GEN_FCN (icode) (x));
3364
      return;
3365
    }
3366
  if (GET_MODE_SIZE (mode) == rounded_size)
3367
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3368
  /* If we are to pad downward, adjust the stack pointer first and
3369
     then store X into the stack location using an offset.  This is
3370
     because emit_move_insn does not know how to pad; it does not have
3371
     access to the type.  */
3372
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3373
    {
3374
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3375
      HOST_WIDE_INT offset;
3376
 
3377
      emit_move_insn (stack_pointer_rtx,
3378
                      expand_binop (Pmode,
3379
#ifdef STACK_GROWS_DOWNWARD
3380
                                    sub_optab,
3381
#else
3382
                                    add_optab,
3383
#endif
3384
                                    stack_pointer_rtx,
3385
                                    GEN_INT (rounded_size),
3386
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN));
3387
 
3388
      offset = (HOST_WIDE_INT) padding_size;
3389
#ifdef STACK_GROWS_DOWNWARD
3390
      if (STACK_PUSH_CODE == POST_DEC)
3391
        /* We have already decremented the stack pointer, so get the
3392
           previous value.  */
3393
        offset += (HOST_WIDE_INT) rounded_size;
3394
#else
3395
      if (STACK_PUSH_CODE == POST_INC)
3396
        /* We have already incremented the stack pointer, so get the
3397
           previous value.  */
3398
        offset -= (HOST_WIDE_INT) rounded_size;
3399
#endif
3400
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3401
    }
3402
  else
3403
    {
3404
#ifdef STACK_GROWS_DOWNWARD
3405
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
3406
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3407
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
3408
#else
3409
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
3410
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3411
                                GEN_INT (rounded_size));
3412
#endif
3413
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3414
    }
3415
 
3416
  dest = gen_rtx_MEM (mode, dest_addr);
3417
 
3418
  if (type != 0)
3419
    {
3420
      set_mem_attributes (dest, type, 1);
3421
 
3422
      if (flag_optimize_sibling_calls)
3423
        /* Function incoming arguments may overlap with sibling call
3424
           outgoing arguments and we cannot allow reordering of reads
3425
           from function arguments with stores to outgoing arguments
3426
           of sibling calls.  */
3427
        set_mem_alias_set (dest, 0);
3428
    }
3429
  emit_move_insn (dest, x);
3430
}
3431
#endif
3432
 
3433
/* Generate code to push X onto the stack, assuming it has mode MODE and
3434
   type TYPE.
3435
   MODE is redundant except when X is a CONST_INT (since they don't
3436
   carry mode info).
3437
   SIZE is an rtx for the size of data to be copied (in bytes),
3438
   needed only if X is BLKmode.
3439
 
3440
   ALIGN (in bits) is maximum alignment we can assume.
3441
 
3442
   If PARTIAL and REG are both nonzero, then copy that many of the first
3443
   bytes of X into registers starting with REG, and push the rest of X.
3444
   The amount of space pushed is decreased by PARTIAL bytes.
3445
   REG must be a hard register in this case.
3446
   If REG is zero but PARTIAL is not, take all other actions for an
3447
   argument partially in registers, but do not actually load any
3448
   registers.
3449
 
3450
   EXTRA is the amount in bytes of extra space to leave next to this arg.
3451
   This is ignored if an argument block has already been allocated.
3452
 
3453
   On a machine that lacks real push insns, ARGS_ADDR is the address of
3454
   the bottom of the argument block for this call.  We use indexing off there
3455
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3456
   argument block has not been preallocated.
3457
 
3458
   ARGS_SO_FAR is the size of args previously pushed for this call.
3459
 
3460
   REG_PARM_STACK_SPACE is nonzero if functions require stack space
3461
   for arguments passed in registers.  If nonzero, it will be the number
3462
   of bytes required.  */
3463
 
3464
void
3465
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3466
                unsigned int align, int partial, rtx reg, int extra,
3467
                rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3468
                rtx alignment_pad)
3469
{
3470
  rtx xinner;
3471
  enum direction stack_direction
3472
#ifdef STACK_GROWS_DOWNWARD
3473
    = downward;
3474
#else
3475
    = upward;
3476
#endif
3477
 
3478
  /* Decide where to pad the argument: `downward' for below,
3479
     `upward' for above, or `none' for don't pad it.
3480
     Default is below for small data on big-endian machines; else above.  */
3481
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3482
 
3483
  /* Invert direction if stack is post-decrement.
3484
     FIXME: why?  */
3485
  if (STACK_PUSH_CODE == POST_DEC)
3486
    if (where_pad != none)
3487
      where_pad = (where_pad == downward ? upward : downward);
3488
 
3489
  xinner = x;
3490
 
3491
  if (mode == BLKmode)
3492
    {
3493
      /* Copy a block into the stack, entirely or partially.  */
3494
 
3495
      rtx temp;
3496
      int used;
3497
      int offset;
3498
      int skip;
3499
 
3500
      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3501
      used = partial - offset;
3502
 
3503
      gcc_assert (size);
3504
 
3505
      /* USED is now the # of bytes we need not copy to the stack
3506
         because registers will take care of them.  */
3507
 
3508
      if (partial != 0)
3509
        xinner = adjust_address (xinner, BLKmode, used);
3510
 
3511
      /* If the partial register-part of the arg counts in its stack size,
3512
         skip the part of stack space corresponding to the registers.
3513
         Otherwise, start copying to the beginning of the stack space,
3514
         by setting SKIP to 0.  */
3515
      skip = (reg_parm_stack_space == 0) ? 0 : used;
3516
 
3517
#ifdef PUSH_ROUNDING
3518
      /* Do it with several push insns if that doesn't take lots of insns
3519
         and if there is no difficulty with push insns that skip bytes
3520
         on the stack for alignment purposes.  */
3521
      if (args_addr == 0
3522
          && PUSH_ARGS
3523
          && GET_CODE (size) == CONST_INT
3524
          && skip == 0
3525
          && MEM_ALIGN (xinner) >= align
3526
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3527
          /* Here we avoid the case of a structure whose weak alignment
3528
             forces many pushes of a small amount of data,
3529
             and such small pushes do rounding that causes trouble.  */
3530
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3531
              || align >= BIGGEST_ALIGNMENT
3532
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3533
                  == (align / BITS_PER_UNIT)))
3534
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3535
        {
3536
          /* Push padding now if padding above and stack grows down,
3537
             or if padding below and stack grows up.
3538
             But if space already allocated, this has already been done.  */
3539
          if (extra && args_addr == 0
3540
              && where_pad != none && where_pad != stack_direction)
3541
            anti_adjust_stack (GEN_INT (extra));
3542
 
3543
          move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3544
        }
3545
      else
3546
#endif /* PUSH_ROUNDING  */
3547
        {
3548
          rtx target;
3549
 
3550
          /* Otherwise make space on the stack and copy the data
3551
             to the address of that space.  */
3552
 
3553
          /* Deduct words put into registers from the size we must copy.  */
3554
          if (partial != 0)
3555
            {
3556
              if (GET_CODE (size) == CONST_INT)
3557
                size = GEN_INT (INTVAL (size) - used);
3558
              else
3559
                size = expand_binop (GET_MODE (size), sub_optab, size,
3560
                                     GEN_INT (used), NULL_RTX, 0,
3561
                                     OPTAB_LIB_WIDEN);
3562
            }
3563
 
3564
          /* Get the address of the stack space.
3565
             In this case, we do not deal with EXTRA separately.
3566
             A single stack adjust will do.  */
3567
          if (! args_addr)
3568
            {
3569
              temp = push_block (size, extra, where_pad == downward);
3570
              extra = 0;
3571
            }
3572
          else if (GET_CODE (args_so_far) == CONST_INT)
3573
            temp = memory_address (BLKmode,
3574
                                   plus_constant (args_addr,
3575
                                                  skip + INTVAL (args_so_far)));
3576
          else
3577
            temp = memory_address (BLKmode,
3578
                                   plus_constant (gen_rtx_PLUS (Pmode,
3579
                                                                args_addr,
3580
                                                                args_so_far),
3581
                                                  skip));
3582
 
3583
          if (!ACCUMULATE_OUTGOING_ARGS)
3584
            {
3585
              /* If the source is referenced relative to the stack pointer,
3586
                 copy it to another register to stabilize it.  We do not need
3587
                 to do this if we know that we won't be changing sp.  */
3588
 
3589
              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3590
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3591
                temp = copy_to_reg (temp);
3592
            }
3593
 
3594
          target = gen_rtx_MEM (BLKmode, temp);
3595
 
3596
          /* We do *not* set_mem_attributes here, because incoming arguments
3597
             may overlap with sibling call outgoing arguments and we cannot
3598
             allow reordering of reads from function arguments with stores
3599
             to outgoing arguments of sibling calls.  We do, however, want
3600
             to record the alignment of the stack slot.  */
3601
          /* ALIGN may well be better aligned than TYPE, e.g. due to
3602
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
3603
          set_mem_align (target, align);
3604
 
3605
          emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3606
        }
3607
    }
3608
  else if (partial > 0)
3609
    {
3610
      /* Scalar partly in registers.  */
3611
 
3612
      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3613
      int i;
3614
      int not_stack;
3615
      /* # bytes of start of argument
3616
         that we must make space for but need not store.  */
3617
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3618
      int args_offset = INTVAL (args_so_far);
3619
      int skip;
3620
 
3621
      /* Push padding now if padding above and stack grows down,
3622
         or if padding below and stack grows up.
3623
         But if space already allocated, this has already been done.  */
3624
      if (extra && args_addr == 0
3625
          && where_pad != none && where_pad != stack_direction)
3626
        anti_adjust_stack (GEN_INT (extra));
3627
 
3628
      /* If we make space by pushing it, we might as well push
3629
         the real data.  Otherwise, we can leave OFFSET nonzero
3630
         and leave the space uninitialized.  */
3631
      if (args_addr == 0)
3632
        offset = 0;
3633
 
3634
      /* Now NOT_STACK gets the number of words that we don't need to
3635
         allocate on the stack.  Convert OFFSET to words too.  */
3636
      not_stack = (partial - offset) / UNITS_PER_WORD;
3637
      offset /= UNITS_PER_WORD;
3638
 
3639
      /* If the partial register-part of the arg counts in its stack size,
3640
         skip the part of stack space corresponding to the registers.
3641
         Otherwise, start copying to the beginning of the stack space,
3642
         by setting SKIP to 0.  */
3643
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3644
 
3645
      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3646
        x = validize_mem (force_const_mem (mode, x));
3647
 
3648
      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3649
         SUBREGs of such registers are not allowed.  */
3650
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3651
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3652
        x = copy_to_reg (x);
3653
 
3654
      /* Loop over all the words allocated on the stack for this arg.  */
3655
      /* We can do it by words, because any scalar bigger than a word
3656
         has a size that is a multiple of a word.  */
3657
#ifndef PUSH_ARGS_REVERSED
3658
      for (i = not_stack; i < size; i++)
3659
#else
3660
      for (i = size - 1; i >= not_stack; i--)
3661
#endif
3662
        if (i >= not_stack + offset)
3663
          emit_push_insn (operand_subword_force (x, i, mode),
3664
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3665
                          0, args_addr,
3666
                          GEN_INT (args_offset + ((i - not_stack + skip)
3667
                                                  * UNITS_PER_WORD)),
3668
                          reg_parm_stack_space, alignment_pad);
3669
    }
3670
  else
3671
    {
3672
      rtx addr;
3673
      rtx dest;
3674
 
3675
      /* Push padding now if padding above and stack grows down,
3676
         or if padding below and stack grows up.
3677
         But if space already allocated, this has already been done.  */
3678
      if (extra && args_addr == 0
3679
          && where_pad != none && where_pad != stack_direction)
3680
        anti_adjust_stack (GEN_INT (extra));
3681
 
3682
#ifdef PUSH_ROUNDING
3683
      if (args_addr == 0 && PUSH_ARGS)
3684
        emit_single_push_insn (mode, x, type);
3685
      else
3686
#endif
3687
        {
3688
          if (GET_CODE (args_so_far) == CONST_INT)
3689
            addr
3690
              = memory_address (mode,
3691
                                plus_constant (args_addr,
3692
                                               INTVAL (args_so_far)));
3693
          else
3694
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3695
                                                       args_so_far));
3696
          dest = gen_rtx_MEM (mode, addr);
3697
 
3698
          /* We do *not* set_mem_attributes here, because incoming arguments
3699
             may overlap with sibling call outgoing arguments and we cannot
3700
             allow reordering of reads from function arguments with stores
3701
             to outgoing arguments of sibling calls.  We do, however, want
3702
             to record the alignment of the stack slot.  */
3703
          /* ALIGN may well be better aligned than TYPE, e.g. due to
3704
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
3705
          set_mem_align (dest, align);
3706
 
3707
          emit_move_insn (dest, x);
3708
        }
3709
    }
3710
 
3711
  /* If part should go in registers, copy that part
3712
     into the appropriate registers.  Do this now, at the end,
3713
     since mem-to-mem copies above may do function calls.  */
3714
  if (partial > 0 && reg != 0)
3715
    {
3716
      /* Handle calls that pass values in multiple non-contiguous locations.
3717
         The Irix 6 ABI has examples of this.  */
3718
      if (GET_CODE (reg) == PARALLEL)
3719
        emit_group_load (reg, x, type, -1);
3720
      else
3721
        {
3722
          gcc_assert (partial % UNITS_PER_WORD == 0);
3723
          move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3724
        }
3725
    }
3726
 
3727
  if (extra && args_addr == 0 && where_pad == stack_direction)
3728
    anti_adjust_stack (GEN_INT (extra));
3729
 
3730
  if (alignment_pad && args_addr == 0)
3731
    anti_adjust_stack (alignment_pad);
3732
}
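
/* Minimal usage sketch (hypothetical, not part of expr.c): push a single
   word-mode value X as an outgoing argument, entirely on the stack, with no
   preallocated argument block, no partial-register handling and no padding.
   The BITS_PER_WORD alignment is an assumption made for the example.  */
static void
example_push_word (rtx x)
{
  emit_push_insn (x, word_mode, NULL_TREE, NULL_RTX,
                  BITS_PER_WORD,           /* align */
                  0, NULL_RTX, 0,          /* partial, reg, extra */
                  NULL_RTX, const0_rtx,    /* args_addr, args_so_far */
                  0, NULL_RTX);            /* reg_parm_stack_space, alignment_pad */
}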
3733
 
3734
/* Return X if X can be used as a subtarget in a sequence of arithmetic
3735
   operations.  */
3736
 
3737
static rtx
3738
get_subtarget (rtx x)
3739
{
3740
  return (optimize
3741
          || x == 0
3742
           /* Only registers can be subtargets.  */
3743
           || !REG_P (x)
3744
           /* Don't use hard regs to avoid extending their life.  */
3745
           || REGNO (x) < FIRST_PSEUDO_REGISTER
3746
          ? 0 : x);
3747
}
3748
 
3749
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
3750
   FIELD is a bitfield.  Returns true if the optimization was successful,
3751
   and there's nothing else to do.  */
3752
 
3753
static bool
3754
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3755
                                 unsigned HOST_WIDE_INT bitpos,
3756
                                 enum machine_mode mode1, rtx str_rtx,
3757
                                 tree to, tree src)
3758
{
3759
  enum machine_mode str_mode = GET_MODE (str_rtx);
3760
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3761
  tree op0, op1;
3762
  rtx value, result;
3763
  optab binop;
3764
 
3765
  if (mode1 != VOIDmode
3766
      || bitsize >= BITS_PER_WORD
3767
      || str_bitsize > BITS_PER_WORD
3768
      || TREE_SIDE_EFFECTS (to)
3769
      || TREE_THIS_VOLATILE (to))
3770
    return false;
3771
 
3772
  STRIP_NOPS (src);
3773
  if (!BINARY_CLASS_P (src)
3774
      || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3775
    return false;
3776
 
3777
  op0 = TREE_OPERAND (src, 0);
3778
  op1 = TREE_OPERAND (src, 1);
3779
  STRIP_NOPS (op0);
3780
 
3781
  if (!operand_equal_p (to, op0, 0))
3782
    return false;
3783
 
3784
  if (MEM_P (str_rtx))
3785
    {
3786
      unsigned HOST_WIDE_INT offset1;
3787
 
3788
      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3789
        str_mode = word_mode;
3790
      str_mode = get_best_mode (bitsize, bitpos,
3791
                                MEM_ALIGN (str_rtx), str_mode, 0);
3792
      if (str_mode == VOIDmode)
3793
        return false;
3794
      str_bitsize = GET_MODE_BITSIZE (str_mode);
3795
 
3796
      offset1 = bitpos;
3797
      bitpos %= str_bitsize;
3798
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3799
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
3800
    }
3801
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3802
    return false;
3803
 
3804
  /* If the bit field covers the whole REG/MEM, store_field
3805
     will likely generate better code.  */
3806
  if (bitsize >= str_bitsize)
3807
    return false;
3808
 
3809
  /* We can't handle fields split across multiple entities.  */
3810
  if (bitpos + bitsize > str_bitsize)
3811
    return false;
3812
 
3813
  if (BYTES_BIG_ENDIAN)
3814
    bitpos = str_bitsize - bitpos - bitsize;
3815
 
3816
  switch (TREE_CODE (src))
3817
    {
3818
    case PLUS_EXPR:
3819
    case MINUS_EXPR:
3820
      /* For now, just optimize the case of the topmost bitfield
3821
         where we don't need to do any masking and also
3822
         1-bit bitfields where xor can be used.
3823
         We might win by one instruction for the other bitfields
3824
         too if insv/extv instructions aren't used, so that
3825
         can be added later.  */
3826
      if (bitpos + bitsize != str_bitsize
3827
          && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3828
        break;
3829
 
3830
      value = expand_expr (op1, NULL_RTX, str_mode, 0);
3831
      value = convert_modes (str_mode,
3832
                             TYPE_MODE (TREE_TYPE (op1)), value,
3833
                             TYPE_UNSIGNED (TREE_TYPE (op1)));
3834
 
3835
      /* We may be accessing data outside the field, which means
3836
         we can alias adjacent data.  */
3837
      if (MEM_P (str_rtx))
3838
        {
3839
          str_rtx = shallow_copy_rtx (str_rtx);
3840
          set_mem_alias_set (str_rtx, 0);
3841
          set_mem_expr (str_rtx, 0);
3842
        }
3843
 
3844
      binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3845
      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3846
        {
3847
          value = expand_and (str_mode, value, const1_rtx, NULL);
3848
          binop = xor_optab;
3849
        }
3850
      value = expand_shift (LSHIFT_EXPR, str_mode, value,
3851
                            build_int_cst (NULL_TREE, bitpos),
3852
                            NULL_RTX, 1);
3853
      result = expand_binop (str_mode, binop, str_rtx,
3854
                             value, str_rtx, 1, OPTAB_WIDEN);
3855
      if (result != str_rtx)
3856
        emit_move_insn (str_rtx, result);
3857
      return true;
3858
 
3859
    case BIT_IOR_EXPR:
3860
    case BIT_XOR_EXPR:
3861
      if (TREE_CODE (op1) != INTEGER_CST)
3862
        break;
3863
      value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3864
      value = convert_modes (GET_MODE (str_rtx),
3865
                             TYPE_MODE (TREE_TYPE (op1)), value,
3866
                             TYPE_UNSIGNED (TREE_TYPE (op1)));
3867
 
3868
      /* We may be accessing data outside the field, which means
3869
         we can alias adjacent data.  */
3870
      if (MEM_P (str_rtx))
3871
        {
3872
          str_rtx = shallow_copy_rtx (str_rtx);
3873
          set_mem_alias_set (str_rtx, 0);
3874
          set_mem_expr (str_rtx, 0);
3875
        }
3876
 
3877
      binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3878
      if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3879
        {
3880
          rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
3881
                              - 1);
3882
          value = expand_and (GET_MODE (str_rtx), value, mask,
3883
                              NULL_RTX);
3884
        }
3885
      value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3886
                            build_int_cst (NULL_TREE, bitpos),
3887
                            NULL_RTX, 1);
3888
      result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3889
                             value, str_rtx, 1, OPTAB_WIDEN);
3890
      if (result != str_rtx)
3891
        emit_move_insn (str_rtx, result);
3892
      return true;
3893
 
3894
    default:
3895
      break;
3896
    }
3897
 
3898
  return false;
3899
}
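
/* Illustrative example (hypothetical, not part of expr.c) of the kind of
   source-level assignment the routine above targets: a read-modify-write of
   a bitfield with a constant operand.  For the 1-bit xor below the store is
   reduced to a single xor on the containing word instead of a separate
   extract/modify/insert sequence.  */
struct example_flags
{
  unsigned int ready : 1;
  unsigned int count : 7;
};

static void
example_toggle_ready (struct example_flags *p)
{
  p->ready ^= 1;   /* BIT_XOR_EXPR on a 1-bit field, handled above */
}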
3900
 
3901
 
3902
/* Expand an assignment that stores the value of FROM into TO.  */
3903
 
3904
void
3905
expand_assignment (tree to, tree from)
3906
{
3907
  rtx to_rtx = 0;
3908
  rtx result;
3909
 
3910
  /* Don't crash if the lhs of the assignment was erroneous.  */
3911
 
3912
  if (TREE_CODE (to) == ERROR_MARK)
3913
    {
3914
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3915
      return;
3916
    }
3917
 
3918
  /* Assignment of a structure component needs special treatment
3919
     if the structure component's rtx is not simply a MEM.
3920
     Assignment of an array element at a constant index, and assignment of
3921
     an array element in an unaligned packed structure field, have the same
3922
     problem.  */
3923
  if (handled_component_p (to)
3924
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3925
    {
3926
      enum machine_mode mode1;
3927
      HOST_WIDE_INT bitsize, bitpos;
3928
      tree offset;
3929
      int unsignedp;
3930
      int volatilep = 0;
3931
      tree tem;
3932
 
3933
      push_temp_slots ();
3934
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3935
                                 &unsignedp, &volatilep, true);
3936
 
3937
      /* If we are going to use store_bit_field and extract_bit_field,
3938
         make sure to_rtx will be safe for multiple use.  */
3939
 
3940
      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3941
 
3942
      if (offset != 0)
3943
        {
3944
          rtx offset_rtx;
3945
 
3946
          if (!MEM_P (to_rtx))
3947
            {
3948
              /* We can get constant negative offsets into arrays with broken
3949
                 user code.  Translate this to a trap instead of ICEing.  */
3950
              gcc_assert (TREE_CODE (offset) == INTEGER_CST);
3951
              expand_builtin_trap ();
3952
              to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
3953
            }
3954
 
3955
          offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3956
#ifdef POINTERS_EXTEND_UNSIGNED
3957
          if (GET_MODE (offset_rtx) != Pmode)
3958
            offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3959
#else
3960
          if (GET_MODE (offset_rtx) != ptr_mode)
3961
            offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3962
#endif
3963
 
3964
          /* A constant address in TO_RTX can have VOIDmode; we must not try
3965
             to call force_reg for that case.  Avoid that case.  */
3966
          if (MEM_P (to_rtx)
3967
              && GET_MODE (to_rtx) == BLKmode
3968
              && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3969
              && bitsize > 0
3970
              && (bitpos % bitsize) == 0
3971
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3972
              && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3973
            {
3974
              to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3975
              bitpos = 0;
3976
            }
3977
 
3978
          to_rtx = offset_address (to_rtx, offset_rtx,
3979
                                   highest_pow2_factor_for_target (to,
3980
                                                                   offset));
3981
        }
3982
 
3983
      /* Handle expand_expr of a complex value returning a CONCAT.  */
3984
      if (GET_CODE (to_rtx) == CONCAT)
3985
        {
3986
          if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
3987
            {
3988
              gcc_assert (bitpos == 0);
3989
              result = store_expr (from, to_rtx, false);
3990
            }
3991
          else
3992
            {
3993
              gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
3994
              result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
3995
            }
3996
        }
3997
      else
3998
        {
3999
          if (MEM_P (to_rtx))
4000
            {
4001
              /* If the field is at offset zero, we could have been given the
4002
                 DECL_RTX of the parent struct.  Don't munge it.  */
4003
              to_rtx = shallow_copy_rtx (to_rtx);
4004
 
4005
              set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4006
 
4007
              /* Deal with volatile and readonly fields.  The former is only
4008
                 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
4009
              if (volatilep)
4010
                MEM_VOLATILE_P (to_rtx) = 1;
4011
              if (component_uses_parent_alias_set (to))
4012
                MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4013
            }
4014
 
4015
          if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4016
                                               to_rtx, to, from))
4017
            result = NULL;
4018
          else
4019
            result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4020
                                  TREE_TYPE (tem), get_alias_set (to));
4021
        }
4022
 
4023
      if (result)
4024
        preserve_temp_slots (result);
4025
      free_temp_slots ();
4026
      pop_temp_slots ();
4027
      return;
4028
    }
4029
 
4030
  /* If the rhs is a function call and its value is not an aggregate,
4031
     call the function before we start to compute the lhs.
4032
     This is needed for correct code for cases such as
4033
     val = setjmp (buf) on machines where reference to val
4034
     requires loading up part of an address in a separate insn.
4035
 
4036
     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4037
     since it might be a promoted variable where the zero- or sign- extension
4038
     needs to be done.  Handling this in the normal way is safe because no
4039
     computation is done before the call.  */
4040
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4041
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4042
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4043
            && REG_P (DECL_RTL (to))))
4044
    {
4045
      rtx value;
4046
 
4047
      push_temp_slots ();
4048
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4049
      if (to_rtx == 0)
4050
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4051
 
4052
      /* Handle calls that return values in multiple non-contiguous locations.
4053
         The Irix 6 ABI has examples of this.  */
4054
      if (GET_CODE (to_rtx) == PARALLEL)
4055
        emit_group_load (to_rtx, value, TREE_TYPE (from),
4056
                         int_size_in_bytes (TREE_TYPE (from)));
4057
      else if (GET_MODE (to_rtx) == BLKmode)
4058
        emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4059
      else
4060
        {
4061
          if (POINTER_TYPE_P (TREE_TYPE (to)))
4062
            value = convert_memory_address (GET_MODE (to_rtx), value);
4063
          emit_move_insn (to_rtx, value);
4064
        }
4065
      preserve_temp_slots (to_rtx);
4066
      free_temp_slots ();
4067
      pop_temp_slots ();
4068
      return;
4069
    }
4070
 
4071
  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
4072
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
4073
 
4074
  if (to_rtx == 0)
4075
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4076
 
4077
  /* Don't move directly into a return register.  */
4078
  if (TREE_CODE (to) == RESULT_DECL
4079
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4080
    {
4081
      rtx temp;
4082
 
4083
      push_temp_slots ();
4084
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4085
 
4086
      if (GET_CODE (to_rtx) == PARALLEL)
4087
        emit_group_load (to_rtx, temp, TREE_TYPE (from),
4088
                         int_size_in_bytes (TREE_TYPE (from)));
4089
      else
4090
        emit_move_insn (to_rtx, temp);
4091
 
4092
      preserve_temp_slots (to_rtx);
4093
      free_temp_slots ();
4094
      pop_temp_slots ();
4095
      return;
4096
    }
4097
 
4098
  /* In case we are returning the contents of an object which overlaps
4099
     the place the value is being stored, use a safe function when copying
4100
     a value through a pointer into a structure value return block.  */
4101
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4102
      && current_function_returns_struct
4103
      && !current_function_returns_pcc_struct)
4104
    {
4105
      rtx from_rtx, size;
4106
 
4107
      push_temp_slots ();
4108
      size = expr_size (from);
4109
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4110
 
4111
      emit_library_call (memmove_libfunc, LCT_NORMAL,
4112
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4113
                         XEXP (from_rtx, 0), Pmode,
4114
                         convert_to_mode (TYPE_MODE (sizetype),
4115
                                          size, TYPE_UNSIGNED (sizetype)),
4116
                         TYPE_MODE (sizetype));
4117
 
4118
      preserve_temp_slots (to_rtx);
4119
      free_temp_slots ();
4120
      pop_temp_slots ();
4121
      return;
4122
    }
4123
 
4124
  /* Compute FROM and store the value in the rtx we got.  */
4125
 
4126
  push_temp_slots ();
4127
  result = store_expr (from, to_rtx, 0);
4128
  preserve_temp_slots (result);
4129
  free_temp_slots ();
4130
  pop_temp_slots ();
4131
  return;
4132
}
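
/* Minimal usage note (an assumption, not part of the original source): an
   assignment statement such as `s.field = v;' is expanded with a single
   call

       expand_assignment (lhs_tree, rhs_tree);

   where lhs_tree and rhs_tree are placeholder names for the COMPONENT_REF
   and the value expression.  */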
4133
 
4134
/* Generate code for computing expression EXP,
4135
   and storing the value into TARGET.
4136
 
4137
   If the mode is BLKmode then we may return TARGET itself.
4138
   It turns out that in BLKmode it doesn't cause a problem,
4139
   because C has no operators that could combine two different
4140
   assignments into the same BLKmode object with different values
4141
   with no sequence point.  Will other languages need this to
4142
   be more thorough?
4143
 
4144
   If CALL_PARAM_P is nonzero, this is a store into a call param on the
4145
   stack, and block moves may need to be treated specially.  */
4146
 
4147
rtx
4148
store_expr (tree exp, rtx target, int call_param_p)
4149
{
4150
  rtx temp;
4151
  rtx alt_rtl = NULL_RTX;
4152
  int dont_return_target = 0;
4153
 
4154
  if (VOID_TYPE_P (TREE_TYPE (exp)))
4155
    {
4156
      /* C++ can generate ?: expressions with a throw expression in one
4157
         branch and an rvalue in the other. Here, we resolve attempts to
4158
         store the throw expression's nonexistent result.  */
4159
      gcc_assert (!call_param_p);
4160
      expand_expr (exp, const0_rtx, VOIDmode, 0);
4161
      return NULL_RTX;
4162
    }
4163
  if (TREE_CODE (exp) == COMPOUND_EXPR)
4164
    {
4165
      /* Perform first part of compound expression, then assign from second
4166
         part.  */
4167
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4168
                   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4169
      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4170
    }
4171
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4172
    {
4173
      /* For conditional expression, get safe form of the target.  Then
4174
         test the condition, doing the appropriate assignment on either
4175
         side.  This avoids the creation of unnecessary temporaries.
4176
         For non-BLKmode, it is more efficient not to do this.  */
4177
 
4178
      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4179
 
4180
      do_pending_stack_adjust ();
4181
      NO_DEFER_POP;
4182
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
4183
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4184
      emit_jump_insn (gen_jump (lab2));
4185
      emit_barrier ();
4186
      emit_label (lab1);
4187
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4188
      emit_label (lab2);
4189
      OK_DEFER_POP;
4190
 
4191
      return NULL_RTX;
4192
    }
4193
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4194
    /* If this is a scalar in a register that is stored in a wider mode
4195
       than the declared mode, compute the result into its declared mode
4196
       and then convert to the wider mode.  Our value is the computed
4197
       expression.  */
4198
    {
4199
      rtx inner_target = 0;
4200
 
4201
      /* We can do the conversion inside EXP, which will often result
4202
         in some optimizations.  Do the conversion in two steps: first
4203
         change the signedness, if needed, then the extend.  But don't
4204
         do this if the type of EXP is a subtype of something else
4205
         since then the conversion might involve more than just
4206
         converting modes.  */
4207
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4208
          && TREE_TYPE (TREE_TYPE (exp)) == 0
4209
          && (!lang_hooks.reduce_bit_field_operations
4210
              || (GET_MODE_PRECISION (GET_MODE (target))
4211
                  == TYPE_PRECISION (TREE_TYPE (exp)))))
4212
        {
4213
          if (TYPE_UNSIGNED (TREE_TYPE (exp))
4214
              != SUBREG_PROMOTED_UNSIGNED_P (target))
4215
            exp = convert
4216
              (lang_hooks.types.signed_or_unsigned_type
4217
               (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4218
 
4219
          exp = convert (lang_hooks.types.type_for_mode
4220
                         (GET_MODE (SUBREG_REG (target)),
4221
                          SUBREG_PROMOTED_UNSIGNED_P (target)),
4222
                         exp);
4223
 
4224
          inner_target = SUBREG_REG (target);
4225
        }
4226
 
4227
      temp = expand_expr (exp, inner_target, VOIDmode,
4228
                          call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4229
 
4230
      /* If TEMP is a VOIDmode constant, use convert_modes to make
4231
         sure that we properly convert it.  */
4232
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4233
        {
4234
          temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4235
                                temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4236
          temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4237
                                GET_MODE (target), temp,
4238
                                SUBREG_PROMOTED_UNSIGNED_P (target));
4239
        }
4240
 
4241
      convert_move (SUBREG_REG (target), temp,
4242
                    SUBREG_PROMOTED_UNSIGNED_P (target));
4243
 
4244
      return NULL_RTX;
4245
    }
4246
  else
4247
    {
4248
      temp = expand_expr_real (exp, target, GET_MODE (target),
4249
                               (call_param_p
4250
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4251
                               &alt_rtl);
4252
      /* Return TARGET if it's a specified hardware register.
4253
         If TARGET is a volatile mem ref, either return TARGET
4254
         or return a reg copied *from* TARGET; ANSI requires this.
4255
 
4256
         Otherwise, if TEMP is not TARGET, return TEMP
4257
         if it is constant (for efficiency),
4258
         or if we really want the correct value.  */
4259
      if (!(target && REG_P (target)
4260
            && REGNO (target) < FIRST_PSEUDO_REGISTER)
4261
          && !(MEM_P (target) && MEM_VOLATILE_P (target))
4262
          && ! rtx_equal_p (temp, target)
4263
          && CONSTANT_P (temp))
4264
        dont_return_target = 1;
4265
    }
4266
 
4267
  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4268
     the same as that of TARGET, adjust the constant.  This is needed, for
4269
     example, in case it is a CONST_DOUBLE and we want only a word-sized
4270
     value.  */
4271
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4272
      && TREE_CODE (exp) != ERROR_MARK
4273
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4274
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4275
                          temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4276
 
4277
  /* If value was not generated in the target, store it there.
4278
     Convert the value to TARGET's type first if necessary and emit the
4279
     pending incrementations that have been queued when expanding EXP.
4280
     Note that we cannot emit the whole queue blindly because this will
4281
     effectively disable the POST_INC optimization later.
4282
 
4283
     If TEMP and TARGET compare equal according to rtx_equal_p, but
4284
     one or both of them are volatile memory refs, we have to distinguish
4285
     two cases:
4286
     - expand_expr has used TARGET.  In this case, we must not generate
4287
       another copy.  This can be detected by TARGET being equal according
4288
       to == .
4289
     - expand_expr has not used TARGET - that means that the source just
4290
       happens to have the same RTX form.  Since temp will have been created
4291
       by expand_expr, it will compare unequal according to == .
4292
       We must generate a copy in this case, to reach the correct number
4293
       of volatile memory references.  */
4294
 
4295
  if ((! rtx_equal_p (temp, target)
4296
       || (temp != target && (side_effects_p (temp)
4297
                              || side_effects_p (target))))
4298
      && TREE_CODE (exp) != ERROR_MARK
4299
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4300
         but TARGET is not a valid memory reference, TEMP will differ
4301
         from TARGET although it is really the same location.  */
4302
      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4303
      /* If there's nothing to copy, don't bother.  Don't call
4304
         expr_size unless necessary, because some front-ends' (C++)
4305
         expr_size hook must not be given objects that are not
4306
         supposed to be bit-copied or bit-initialized.  */
4307
      && expr_size (exp) != const0_rtx)
4308
    {
4309
      if (GET_MODE (temp) != GET_MODE (target)
4310
          && GET_MODE (temp) != VOIDmode)
4311
        {
4312
          int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4313
          if (dont_return_target)
4314
            {
4315
              /* In this case, we will return TEMP,
4316
                 so make sure it has the proper mode.
4317
                 But don't forget to store the value into TARGET.  */
4318
              temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4319
              emit_move_insn (target, temp);
4320
            }
4321
          else
4322
            convert_move (target, temp, unsignedp);
4323
        }
4324
 
4325
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4326
        {
4327
          /* Handle copying a string constant into an array.  The string
4328
             constant may be shorter than the array.  So copy just the string's
4329
             actual length, and clear the rest.  First get the size of the data
4330
             type of the string, which is actually the size of the target.  */
4331
          rtx size = expr_size (exp);
4332
 
4333
          if (GET_CODE (size) == CONST_INT
4334
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
4335
            emit_block_move (target, temp, size,
4336
                             (call_param_p
4337
                              ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4338
          else
4339
            {
4340
              /* Compute the size of the data to copy from the string.  */
4341
              tree copy_size
4342
                = size_binop (MIN_EXPR,
4343
                              make_tree (sizetype, size),
4344
                              size_int (TREE_STRING_LENGTH (exp)));
4345
              rtx copy_size_rtx
4346
                = expand_expr (copy_size, NULL_RTX, VOIDmode,
4347
                               (call_param_p
4348
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4349
              rtx label = 0;
4350
 
4351
              /* Copy that much.  */
4352
              copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4353
                                               TYPE_UNSIGNED (sizetype));
4354
              emit_block_move (target, temp, copy_size_rtx,
4355
                               (call_param_p
4356
                                ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4357
 
4358
              /* Figure out how much is left in TARGET that we have to clear.
4359
                 Do all calculations in ptr_mode.  */
4360
              if (GET_CODE (copy_size_rtx) == CONST_INT)
4361
                {
4362
                  size = plus_constant (size, -INTVAL (copy_size_rtx));
4363
                  target = adjust_address (target, BLKmode,
4364
                                           INTVAL (copy_size_rtx));
4365
                }
4366
              else
4367
                {
4368
                  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4369
                                       copy_size_rtx, NULL_RTX, 0,
4370
                                       OPTAB_LIB_WIDEN);
4371
 
4372
#ifdef POINTERS_EXTEND_UNSIGNED
4373
                  if (GET_MODE (copy_size_rtx) != Pmode)
4374
                    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4375
                                                     TYPE_UNSIGNED (sizetype));
4376
#endif
4377
 
4378
                  target = offset_address (target, copy_size_rtx,
4379
                                           highest_pow2_factor (copy_size));
4380
                  label = gen_label_rtx ();
4381
                  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4382
                                           GET_MODE (size), 0, label);
4383
                }
4384
 
4385
              if (size != const0_rtx)
4386
                clear_storage (target, size, BLOCK_OP_NORMAL);
4387
 
4388
              if (label)
4389
                emit_label (label);
4390
            }
4391
        }
4392
      /* Handle calls that return values in multiple non-contiguous locations.
4393
         The Irix 6 ABI has examples of this.  */
4394
      else if (GET_CODE (target) == PARALLEL)
4395
        emit_group_load (target, temp, TREE_TYPE (exp),
4396
                         int_size_in_bytes (TREE_TYPE (exp)));
4397
      else if (GET_MODE (temp) == BLKmode)
4398
        emit_block_move (target, temp, expr_size (exp),
4399
                         (call_param_p
4400
                          ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4401
      else
4402
        {
4403
          temp = force_operand (temp, target);
4404
          if (temp != target)
4405
            emit_move_insn (target, temp);
4406
        }
4407
    }
4408
 
4409
  return NULL_RTX;
4410
}
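
/* Illustrative sketch (not part of expr.c): the string-constant path above
   copies only the string's actual length into the target and then clears
   the remainder.  At the C level, the same effect, written with a
   hypothetical helper, looks roughly like this.  */
#include <string.h>

static void
init_array_from_string (char *dst, size_t dst_size,
                        const char *src, size_t src_len)
{
  size_t copy = src_len < dst_size ? src_len : dst_size;   /* the MIN_EXPR above */
  memcpy (dst, src, copy);                                  /* emit_block_move    */
  if (dst_size > copy)
    memset (dst + copy, 0, dst_size - copy);                /* clear_storage      */
}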
4411
 
4412
/* Examine CTOR to discover:
4413
   * how many scalar fields are set to nonzero values,
4414
     and place it in *P_NZ_ELTS;
4415
   * how many scalar fields are set to non-constant values,
4416
     and place it in *P_NC_ELTS; and
4417
   * how many scalar fields in total are in CTOR,
4418
     and place it in *P_ELT_COUNT.
4419
   * if a type is a union, and the initializer from the constructor
4420
     is not the largest element in the union, then set *p_must_clear.  */
4421
 
4422
static void
4423
categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4424
                            HOST_WIDE_INT *p_nc_elts,
4425
                            HOST_WIDE_INT *p_elt_count,
4426
                            bool *p_must_clear)
4427
{
4428
  unsigned HOST_WIDE_INT idx;
4429
  HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4430
  tree value, purpose;
4431
 
4432
  nz_elts = 0;
4433
  nc_elts = 0;
4434
  elt_count = 0;
4435
 
4436
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4437
    {
4438
      HOST_WIDE_INT mult;
4439
 
4440
      mult = 1;
4441
      if (TREE_CODE (purpose) == RANGE_EXPR)
4442
        {
4443
          tree lo_index = TREE_OPERAND (purpose, 0);
4444
          tree hi_index = TREE_OPERAND (purpose, 1);
4445
 
4446
          if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4447
            mult = (tree_low_cst (hi_index, 1)
4448
                    - tree_low_cst (lo_index, 1) + 1);
4449
        }
4450
 
4451
      switch (TREE_CODE (value))
4452
        {
4453
        case CONSTRUCTOR:
4454
          {
4455
            HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
4456
            categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
4457
            nz_elts += mult * nz;
4458
            nc_elts += mult * nc;
4459
            elt_count += mult * ic;
4460
          }
4461
          break;
4462
 
4463
        case INTEGER_CST:
4464
        case REAL_CST:
4465
          if (!initializer_zerop (value))
4466
            nz_elts += mult;
4467
          elt_count += mult;
4468
          break;
4469
 
4470
        case STRING_CST:
4471
          nz_elts += mult * TREE_STRING_LENGTH (value);
4472
          elt_count += mult * TREE_STRING_LENGTH (value);
4473
          break;
4474
 
4475
        case COMPLEX_CST:
4476
          if (!initializer_zerop (TREE_REALPART (value)))
4477
            nz_elts += mult;
4478
          if (!initializer_zerop (TREE_IMAGPART (value)))
4479
            nz_elts += mult;
4480
          elt_count += mult;
4481
          break;
4482
 
4483
        case VECTOR_CST:
4484
          {
4485
            tree v;
4486
            for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4487
              {
4488
                if (!initializer_zerop (TREE_VALUE (v)))
4489
                  nz_elts += mult;
4490
                elt_count += mult;
4491
              }
4492
          }
4493
          break;
4494
 
4495
        default:
4496
          nz_elts += mult;
4497
          elt_count += mult;
4498
          if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4499
            nc_elts += mult;
4500
          break;
4501
        }
4502
    }
4503
 
4504
  if (!*p_must_clear
4505
      && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4506
          || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4507
    {
4508
      tree init_sub_type;
4509
      bool clear_this = true;
4510
 
4511
      if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4512
        {
4513
          /* We don't expect more than one element of the union to be
4514
             initialized.  Not sure what we should do otherwise... */
4515
          gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4516
                      == 1);
4517
 
4518
          init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4519
                                                CONSTRUCTOR_ELTS (ctor),
4520
                                                0)->value);
4521
 
4522
          /* ??? We could look at each element of the union, and find the
4523
             largest element.  Which would avoid comparing the size of the
4524
             initialized element against any tail padding in the union.
4525
             Doesn't seem worth the effort...  */
4526
          if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4527
                                TYPE_SIZE (init_sub_type)) == 1)
4528
            {
4529
              /* And now we have to find out if the element itself is fully
4530
                 constructed.  E.g. for union { struct { int a, b; } s; } u
4531
                 = { .s = { .a = 1 } }.  */
4532
              if (elt_count == count_type_elements (init_sub_type, false))
4533
                clear_this = false;
4534
            }
4535
        }
4536
 
4537
      *p_must_clear = clear_this;
4538
    }
4539
 
4540
  *p_nz_elts += nz_elts;
4541
  *p_nc_elts += nc_elts;
4542
  *p_elt_count += elt_count;
4543
}
4544
 
4545
void
4546
categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4547
                          HOST_WIDE_INT *p_nc_elts,
4548
                          HOST_WIDE_INT *p_elt_count,
4549
                          bool *p_must_clear)
4550
{
4551
  *p_nz_elts = 0;
4552
  *p_nc_elts = 0;
4553
  *p_elt_count = 0;
4554
  *p_must_clear = false;
4555
  categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
4556
                              p_must_clear);
4557
}
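
/* Illustrative sketch (not part of expr.c): the p_must_clear logic above.
   When the initialized union member is smaller than the union itself, the
   whole union still has to be cleared, even though every scalar that is
   initialized is nonzero.  */
union small_in_big
{
  char tag;        /* 1 byte  */
  double payload;  /* 8 bytes */
};

static union small_in_big
make_tagged (void)
{
  /* TYPE_SIZE of the union differs from TYPE_SIZE of 'char', so
     *p_must_clear is set and the bytes beyond 'tag' get zeroed.  */
  union small_in_big u = { .tag = 1 };
  return u;
}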
4558
 
4559
/* Count the number of scalars in TYPE.  Return -1 if the count overflows or
4560
   if TYPE is variable-sized.  If ALLOW_FLEXARR is true, don't count a
4561
   flexible array member at the end of the structure.  */
4562
 
4563
HOST_WIDE_INT
4564
count_type_elements (tree type, bool allow_flexarr)
4565
{
4566
  const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4567
  switch (TREE_CODE (type))
4568
    {
4569
    case ARRAY_TYPE:
4570
      {
4571
        tree telts = array_type_nelts (type);
4572
        if (telts && host_integerp (telts, 1))
4573
          {
4574
            HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4575
            HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4576
            if (n == 0)
4577
              return 0;
4578
            else if (max / n > m)
4579
              return n * m;
4580
          }
4581
        return -1;
4582
      }
4583
 
4584
    case RECORD_TYPE:
4585
      {
4586
        HOST_WIDE_INT n = 0, t;
4587
        tree f;
4588
 
4589
        for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4590
          if (TREE_CODE (f) == FIELD_DECL)
4591
            {
4592
              t = count_type_elements (TREE_TYPE (f), false);
4593
              if (t < 0)
4594
                {
4595
                  /* Check for structures with flexible array member.  */
4596
                  tree tf = TREE_TYPE (f);
4597
                  if (allow_flexarr
4598
                      && TREE_CHAIN (f) == NULL
4599
                      && TREE_CODE (tf) == ARRAY_TYPE
4600
                      && TYPE_DOMAIN (tf)
4601
                      && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4602
                      && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4603
                      && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4604
                      && int_size_in_bytes (type) >= 0)
4605
                    break;
4606
 
4607
                  return -1;
4608
                }
4609
              n += t;
4610
            }
4611
 
4612
        return n;
4613
      }
4614
 
4615
    case UNION_TYPE:
4616
    case QUAL_UNION_TYPE:
4617
      {
4618
        /* Ho hum.  How in the world do we guess here?  Clearly it isn't
4619
           right to count the fields.  Guess based on the number of words.  */
4620
        HOST_WIDE_INT n = int_size_in_bytes (type);
4621
        if (n < 0)
4622
          return -1;
4623
        return n / UNITS_PER_WORD;
4624
      }
4625
 
4626
    case COMPLEX_TYPE:
4627
      return 2;
4628
 
4629
    case VECTOR_TYPE:
4630
      return TYPE_VECTOR_SUBPARTS (type);
4631
 
4632
    case INTEGER_TYPE:
4633
    case REAL_TYPE:
4634
    case ENUMERAL_TYPE:
4635
    case BOOLEAN_TYPE:
4636
    case CHAR_TYPE:
4637
    case POINTER_TYPE:
4638
    case OFFSET_TYPE:
4639
    case REFERENCE_TYPE:
4640
      return 1;
4641
 
4642
    case VOID_TYPE:
4643
    case METHOD_TYPE:
4644
    case FUNCTION_TYPE:
4645
    case LANG_TYPE:
4646
    default:
4647
      gcc_unreachable ();
4648
    }
4649
}
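
/* Illustrative sketch (not part of expr.c): scalar counts the switch above
   would produce for some ordinary C types, assuming a 32-bit word.  */
struct point  { int x, y, z; };           /* RECORD_TYPE:  3 scalars        */
typedef int row[8];                       /* ARRAY_TYPE:   8 scalars        */
typedef struct point grid[4];             /* nested:       4 * 3 = 12       */
typedef _Complex double cplx;             /* COMPLEX_TYPE: 2 scalars        */
union word_guess { int i; char c[8]; };   /* UNION_TYPE:   size / word = 2  */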
4650
 
4651
/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
4652
 
4653
static int
4654
mostly_zeros_p (tree exp)
4655
{
4656
  if (TREE_CODE (exp) == CONSTRUCTOR)
4657
 
4658
    {
4659
      HOST_WIDE_INT nz_elts, nc_elts, count, elts;
4660
      bool must_clear;
4661
 
4662
      categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4663
      if (must_clear)
4664
        return 1;
4665
 
4666
      elts = count_type_elements (TREE_TYPE (exp), false);
4667
 
4668
      return nz_elts < elts / 4;
4669
    }
4670
 
4671
  return initializer_zerop (exp);
4672
}
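
/* Illustrative sketch (not part of expr.c): the 3/4 heuristic above.  For an
   automatic initializer such as the one below, nz_elts is 1 while the type
   has 8 scalars, so 1 < 8/4 and mostly_zeros_p returns 1; store_constructor
   then prefers to clear the whole object first and store only the nonzero
   element.  */
static void
use_sparse (void)
{
  int sparse[8] = { [3] = 42 };   /* one nonzero element out of eight */
  (void) sparse;
}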
4673
 
4674
/* Return 1 if EXP contains all zeros.  */
4675
 
4676
static int
4677
all_zeros_p (tree exp)
4678
{
4679
  if (TREE_CODE (exp) == CONSTRUCTOR)
4680
 
4681
    {
4682
      HOST_WIDE_INT nz_elts, nc_elts, count;
4683
      bool must_clear;
4684
 
4685
      categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4686
      return nz_elts == 0;
4687
    }
4688
 
4689
  return initializer_zerop (exp);
4690
}
4691
 
4692
/* Helper function for store_constructor.
4693
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4694
   TYPE is the type of the CONSTRUCTOR, not the element type.
4695
   CLEARED is as for store_constructor.
4696
   ALIAS_SET is the alias set to use for any stores.
4697
 
4698
   This provides a recursive shortcut back to store_constructor when it isn't
4699
   necessary to go through store_field.  This is so that we can pass through
4700
   the cleared field to let store_constructor know that we may not have to
4701
   clear a substructure if the outer structure has already been cleared.  */
4702
 
4703
static void
4704
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4705
                         HOST_WIDE_INT bitpos, enum machine_mode mode,
4706
                         tree exp, tree type, int cleared, int alias_set)
4707
{
4708
  if (TREE_CODE (exp) == CONSTRUCTOR
4709
      /* We can only call store_constructor recursively if the size and
4710
         bit position are on a byte boundary.  */
4711
      && bitpos % BITS_PER_UNIT == 0
4712
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4713
      /* If we have a nonzero bitpos for a register target, then we just
4714
         let store_field do the bitfield handling.  This is unlikely to
4715
         generate unnecessary clear instructions anyways.  */
4716
      && (bitpos == 0 || MEM_P (target)))
4717
    {
4718
      if (MEM_P (target))
4719
        target
4720
          = adjust_address (target,
4721
                            GET_MODE (target) == BLKmode
4722
                            || 0 != (bitpos
4723
                                     % GET_MODE_ALIGNMENT (GET_MODE (target)))
4724
                            ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4725
 
4726
 
4727
      /* Update the alias set, if required.  */
4728
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4729
          && MEM_ALIAS_SET (target) != 0)
4730
        {
4731
          target = copy_rtx (target);
4732
          set_mem_alias_set (target, alias_set);
4733
        }
4734
 
4735
      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4736
    }
4737
  else
4738
    store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4739
}
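
/* Illustrative sketch (not part of expr.c): the byte-boundary test above.
   The nested aggregate 'in' below starts and ends on byte boundaries, so its
   CONSTRUCTOR can be stored through the recursive store_constructor shortcut;
   the 3-bit field 'b' is not byte-sized, so it takes the store_field path.  */
struct inner { short x, y; };
struct outer
{
  unsigned int b : 3;     /* bit-field: handled by store_field          */
  struct inner  in;       /* byte-aligned aggregate: recursive shortcut */
};

static void
use_nested (void)
{
  struct outer o = { .b = 5, .in = { 1, 2 } };
  (void) o;
}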
4740
 
4741
/* Store the value of constructor EXP into the rtx TARGET.
4742
   TARGET is either a REG or a MEM; we know it cannot conflict, since
4743
   safe_from_p has been called.
4744
   CLEARED is true if TARGET is known to have been zero'd.
4745
   SIZE is the number of bytes of TARGET we are allowed to modify: this
4746
   may not be the same as the size of EXP if we are assigning to a field
4747
   which has been packed to exclude padding bits.  */
4748
 
4749
static void
4750
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4751
{
4752
  tree type = TREE_TYPE (exp);
4753
#ifdef WORD_REGISTER_OPERATIONS
4754
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4755
#endif
4756
 
4757
  switch (TREE_CODE (type))
4758
    {
4759
    case RECORD_TYPE:
4760
    case UNION_TYPE:
4761
    case QUAL_UNION_TYPE:
4762
      {
4763
        unsigned HOST_WIDE_INT idx;
4764
        tree field, value;
4765
 
4766
        /* If size is zero or the target is already cleared, no clearing is needed.  */
4767
        if (size == 0 || cleared)
4768
          cleared = 1;
4769
        /* We either clear the aggregate or indicate the value is dead.  */
4770
        else if ((TREE_CODE (type) == UNION_TYPE
4771
                  || TREE_CODE (type) == QUAL_UNION_TYPE)
4772
                 && ! CONSTRUCTOR_ELTS (exp))
4773
          /* If the constructor is empty, clear the union.  */
4774
          {
4775
            clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4776
            cleared = 1;
4777
          }
4778
 
4779
        /* If we are building a static constructor into a register,
4780
           set the initial value as zero so we can fold the value into
4781
           a constant.  But if more than one register is involved,
4782
           this probably loses.  */
4783
        else if (REG_P (target) && TREE_STATIC (exp)
4784
                 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4785
          {
4786
            emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4787
            cleared = 1;
4788
          }
4789
 
4790
        /* If the constructor has fewer fields than the structure or
4791
           if we are initializing the structure to mostly zeros, clear
4792
           the whole structure first.  Don't do this if TARGET is a
4793
           register whose mode size isn't equal to SIZE since
4794
           clear_storage can't handle this case.  */
4795
        else if (size > 0
4796
                 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4797
                      != fields_length (type))
4798
                     || mostly_zeros_p (exp))
4799
                 && (!REG_P (target)
4800
                     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4801
                         == size)))
4802
          {
4803
            clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4804
            cleared = 1;
4805
          }
4806
 
4807
        if (! cleared)
4808
          emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4809
 
4810
        /* Store each element of the constructor into the
4811
           corresponding field of TARGET.  */
4812
        FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4813
          {
4814
            enum machine_mode mode;
4815
            HOST_WIDE_INT bitsize;
4816
            HOST_WIDE_INT bitpos = 0;
4817
            tree offset;
4818
            rtx to_rtx = target;
4819
 
4820
            /* Just ignore missing fields.  We cleared the whole
4821
               structure, above, if any fields are missing.  */
4822
            if (field == 0)
4823
              continue;
4824
 
4825
            if (cleared && initializer_zerop (value))
4826
              continue;
4827
 
4828
            if (host_integerp (DECL_SIZE (field), 1))
4829
              bitsize = tree_low_cst (DECL_SIZE (field), 1);
4830
            else
4831
              bitsize = -1;
4832
 
4833
            mode = DECL_MODE (field);
4834
            if (DECL_BIT_FIELD (field))
4835
              mode = VOIDmode;
4836
 
4837
            offset = DECL_FIELD_OFFSET (field);
4838
            if (host_integerp (offset, 0)
4839
                && host_integerp (bit_position (field), 0))
4840
              {
4841
                bitpos = int_bit_position (field);
4842
                offset = 0;
4843
              }
4844
            else
4845
              bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4846
 
4847
            if (offset)
4848
              {
4849
                rtx offset_rtx;
4850
 
4851
                offset
4852
                  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4853
                                                    make_tree (TREE_TYPE (exp),
4854
                                                               target));
4855
 
4856
                offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4857
                gcc_assert (MEM_P (to_rtx));
4858
 
4859
#ifdef POINTERS_EXTEND_UNSIGNED
4860
                if (GET_MODE (offset_rtx) != Pmode)
4861
                  offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4862
#else
4863
                if (GET_MODE (offset_rtx) != ptr_mode)
4864
                  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4865
#endif
4866
 
4867
                to_rtx = offset_address (to_rtx, offset_rtx,
4868
                                         highest_pow2_factor (offset));
4869
              }
4870
 
4871
#ifdef WORD_REGISTER_OPERATIONS
4872
            /* If this initializes a field that is smaller than a
4873
               word, at the start of a word, try to widen it to a full
4874
               word.  This special case allows us to output C++ member
4875
               function initializations in a form that the optimizers
4876
               can understand.  */
4877
            if (REG_P (target)
4878
                && bitsize < BITS_PER_WORD
4879
                && bitpos % BITS_PER_WORD == 0
4880
                && GET_MODE_CLASS (mode) == MODE_INT
4881
                && TREE_CODE (value) == INTEGER_CST
4882
                && exp_size >= 0
4883
                && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4884
              {
4885
                tree type = TREE_TYPE (value);
4886
 
4887
                if (TYPE_PRECISION (type) < BITS_PER_WORD)
4888
                  {
4889
                    type = lang_hooks.types.type_for_size
4890
                      (BITS_PER_WORD, TYPE_UNSIGNED (type));
4891
                    value = convert (type, value);
4892
                  }
4893
 
4894
                if (BYTES_BIG_ENDIAN)
4895
                  value
4896
                   = fold_build2 (LSHIFT_EXPR, type, value,
4897
                                   build_int_cst (NULL_TREE,
4898
                                                  BITS_PER_WORD - bitsize));
4899
                bitsize = BITS_PER_WORD;
4900
                mode = word_mode;
4901
              }
4902
#endif
4903
 
4904
            if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4905
                && DECL_NONADDRESSABLE_P (field))
4906
              {
4907
                to_rtx = copy_rtx (to_rtx);
4908
                MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4909
              }
4910
 
4911
            store_constructor_field (to_rtx, bitsize, bitpos, mode,
4912
                                     value, type, cleared,
4913
                                     get_alias_set (TREE_TYPE (field)));
4914
          }
4915
        break;
4916
      }
4917
    case ARRAY_TYPE:
4918
      {
4919
        tree value, index;
4920
        unsigned HOST_WIDE_INT i;
4921
        int need_to_clear;
4922
        tree domain;
4923
        tree elttype = TREE_TYPE (type);
4924
        int const_bounds_p;
4925
        HOST_WIDE_INT minelt = 0;
4926
        HOST_WIDE_INT maxelt = 0;
4927
 
4928
        domain = TYPE_DOMAIN (type);
4929
        const_bounds_p = (TYPE_MIN_VALUE (domain)
4930
                          && TYPE_MAX_VALUE (domain)
4931
                          && host_integerp (TYPE_MIN_VALUE (domain), 0)
4932
                          && host_integerp (TYPE_MAX_VALUE (domain), 0));
4933
 
4934
        /* If we have constant bounds for the range of the type, get them.  */
4935
        if (const_bounds_p)
4936
          {
4937
            minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4938
            maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4939
          }
4940
 
4941
        /* If the constructor has fewer elements than the array, clear
4942
           the whole array first.  Similarly if this is a static
4943
           constructor of a non-BLKmode object.  */
4944
        if (cleared)
4945
          need_to_clear = 0;
4946
        else if (REG_P (target) && TREE_STATIC (exp))
4947
          need_to_clear = 1;
4948
        else
4949
          {
4950
            unsigned HOST_WIDE_INT idx;
4951
            tree index, value;
4952
            HOST_WIDE_INT count = 0, zero_count = 0;
4953
            need_to_clear = ! const_bounds_p;
4954
 
4955
            /* This loop is a more accurate version of the loop in
4956
               mostly_zeros_p (it handles RANGE_EXPR in an index).  It
4957
               is also needed to check for missing elements.  */
4958
            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
4959
              {
4960
                HOST_WIDE_INT this_node_count;
4961
 
4962
                if (need_to_clear)
4963
                  break;
4964
 
4965
                if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4966
                  {
4967
                    tree lo_index = TREE_OPERAND (index, 0);
4968
                    tree hi_index = TREE_OPERAND (index, 1);
4969
 
4970
                    if (! host_integerp (lo_index, 1)
4971
                        || ! host_integerp (hi_index, 1))
4972
                      {
4973
                        need_to_clear = 1;
4974
                        break;
4975
                      }
4976
 
4977
                    this_node_count = (tree_low_cst (hi_index, 1)
4978
                                       - tree_low_cst (lo_index, 1) + 1);
4979
                  }
4980
                else
4981
                  this_node_count = 1;
4982
 
4983
                count += this_node_count;
4984
                if (mostly_zeros_p (value))
4985
                  zero_count += this_node_count;
4986
              }
4987
 
4988
            /* Clear the entire array first if there are any missing
4989
               elements, or if the incidence of zero elements is >=
4990
               75%.  */
4991
            if (! need_to_clear
4992
                && (count < maxelt - minelt + 1
4993
                    || 4 * zero_count >= 3 * count))
4994
              need_to_clear = 1;
4995
          }
4996
 
4997
        if (need_to_clear && size > 0)
4998
          {
4999
            if (REG_P (target))
5000
              emit_move_insn (target,  CONST0_RTX (GET_MODE (target)));
5001
            else
5002
              clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5003
            cleared = 1;
5004
          }
5005
 
5006
        if (!cleared && REG_P (target))
5007
          /* Inform later passes that the old value is dead.  */
5008
          emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5009
 
5010
        /* Store each element of the constructor into the
5011
           corresponding element of TARGET, determined by counting the
5012
           elements.  */
5013
        FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5014
          {
5015
            enum machine_mode mode;
5016
            HOST_WIDE_INT bitsize;
5017
            HOST_WIDE_INT bitpos;
5018
            int unsignedp;
5019
            rtx xtarget = target;
5020
 
5021
            if (cleared && initializer_zerop (value))
5022
              continue;
5023
 
5024
            unsignedp = TYPE_UNSIGNED (elttype);
5025
            mode = TYPE_MODE (elttype);
5026
            if (mode == BLKmode)
5027
              bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5028
                         ? tree_low_cst (TYPE_SIZE (elttype), 1)
5029
                         : -1);
5030
            else
5031
              bitsize = GET_MODE_BITSIZE (mode);
5032
 
5033
            if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5034
              {
5035
                tree lo_index = TREE_OPERAND (index, 0);
5036
                tree hi_index = TREE_OPERAND (index, 1);
5037
                rtx index_r, pos_rtx;
5038
                HOST_WIDE_INT lo, hi, count;
5039
                tree position;
5040
 
5041
                /* If the range is constant and "small", unroll the loop.  */
5042
                if (const_bounds_p
5043
                    && host_integerp (lo_index, 0)
5044
                    && host_integerp (hi_index, 0)
5045
                    && (lo = tree_low_cst (lo_index, 0),
5046
                        hi = tree_low_cst (hi_index, 0),
5047
                        count = hi - lo + 1,
5048
                        (!MEM_P (target)
5049
                         || count <= 2
5050
                         || (host_integerp (TYPE_SIZE (elttype), 1)
5051
                             && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5052
                                 <= 40 * 8)))))
5053
                  {
5054
                    lo -= minelt;  hi -= minelt;
5055
                    for (; lo <= hi; lo++)
5056
                      {
5057
                        bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5058
 
5059
                        if (MEM_P (target)
5060
                            && !MEM_KEEP_ALIAS_SET_P (target)
5061
                            && TREE_CODE (type) == ARRAY_TYPE
5062
                            && TYPE_NONALIASED_COMPONENT (type))
5063
                          {
5064
                            target = copy_rtx (target);
5065
                            MEM_KEEP_ALIAS_SET_P (target) = 1;
5066
                          }
5067
 
5068
                        store_constructor_field
5069
                          (target, bitsize, bitpos, mode, value, type, cleared,
5070
                           get_alias_set (elttype));
5071
                      }
5072
                  }
5073
                else
5074
                  {
5075
                    rtx loop_start = gen_label_rtx ();
5076
                    rtx loop_end = gen_label_rtx ();
5077
                    tree exit_cond;
5078
 
5079
                    expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5080
                    unsignedp = TYPE_UNSIGNED (domain);
5081
 
5082
                    index = build_decl (VAR_DECL, NULL_TREE, domain);
5083
 
5084
                    index_r
5085
                      = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5086
                                                   &unsignedp, 0));
5087
                    SET_DECL_RTL (index, index_r);
5088
                    store_expr (lo_index, index_r, 0);
5089
 
5090
                    /* Build the head of the loop.  */
5091
                    do_pending_stack_adjust ();
5092
                    emit_label (loop_start);
5093
 
5094
                    /* Assign value to element index.  */
5095
                    position
5096
                      = convert (ssizetype,
5097
                                 fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5098
                                              index, TYPE_MIN_VALUE (domain)));
5099
                    position = size_binop (MULT_EXPR, position,
5100
                                           convert (ssizetype,
5101
                                                    TYPE_SIZE_UNIT (elttype)));
5102
 
5103
                    pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5104
                    xtarget = offset_address (target, pos_rtx,
5105
                                              highest_pow2_factor (position));
5106
                    xtarget = adjust_address (xtarget, mode, 0);
5107
                    if (TREE_CODE (value) == CONSTRUCTOR)
5108
                      store_constructor (value, xtarget, cleared,
5109
                                         bitsize / BITS_PER_UNIT);
5110
                    else
5111
                      store_expr (value, xtarget, 0);
5112
 
5113
                    /* Generate a conditional jump to exit the loop.  */
5114
                    exit_cond = build2 (LT_EXPR, integer_type_node,
5115
                                        index, hi_index);
5116
                    jumpif (exit_cond, loop_end);
5117
 
5118
                    /* Update the loop counter, and jump to the head of
5119
                       the loop.  */
5120
                    expand_assignment (index,
5121
                                       build2 (PLUS_EXPR, TREE_TYPE (index),
5122
                                               index, integer_one_node));
5123
 
5124
                    emit_jump (loop_start);
5125
 
5126
                    /* Build the end of the loop.  */
5127
                    emit_label (loop_end);
5128
                  }
5129
              }
5130
            else if ((index != 0 && ! host_integerp (index, 0))
5131
                     || ! host_integerp (TYPE_SIZE (elttype), 1))
5132
              {
5133
                tree position;
5134
 
5135
                if (index == 0)
5136
                  index = ssize_int (1);
5137
 
5138
                if (minelt)
5139
                  index = fold_convert (ssizetype,
5140
                                        fold_build2 (MINUS_EXPR,
5141
                                                     TREE_TYPE (index),
5142
                                                     index,
5143
                                                     TYPE_MIN_VALUE (domain)));
5144
 
5145
                position = size_binop (MULT_EXPR, index,
5146
                                       convert (ssizetype,
5147
                                                TYPE_SIZE_UNIT (elttype)));
5148
                xtarget = offset_address (target,
5149
                                          expand_expr (position, 0, VOIDmode, 0),
5150
                                          highest_pow2_factor (position));
5151
                xtarget = adjust_address (xtarget, mode, 0);
5152
                store_expr (value, xtarget, 0);
5153
              }
5154
            else
5155
              {
5156
                if (index != 0)
5157
                  bitpos = ((tree_low_cst (index, 0) - minelt)
5158
                            * tree_low_cst (TYPE_SIZE (elttype), 1));
5159
                else
5160
                  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5161
 
5162
                if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5163
                    && TREE_CODE (type) == ARRAY_TYPE
5164
                    && TYPE_NONALIASED_COMPONENT (type))
5165
                  {
5166
                    target = copy_rtx (target);
5167
                    MEM_KEEP_ALIAS_SET_P (target) = 1;
5168
                  }
5169
                store_constructor_field (target, bitsize, bitpos, mode, value,
5170
                                         type, cleared, get_alias_set (elttype));
5171
              }
5172
          }
5173
        break;
5174
      }
5175
 
5176
    case VECTOR_TYPE:
5177
      {
5178
        unsigned HOST_WIDE_INT idx;
5179
        constructor_elt *ce;
5180
        int i;
5181
        int need_to_clear;
5182
        int icode = 0;
5183
        tree elttype = TREE_TYPE (type);
5184
        int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5185
        enum machine_mode eltmode = TYPE_MODE (elttype);
5186
        HOST_WIDE_INT bitsize;
5187
        HOST_WIDE_INT bitpos;
5188
        rtvec vector = NULL;
5189
        unsigned n_elts;
5190
 
5191
        gcc_assert (eltmode != BLKmode);
5192
 
5193
        n_elts = TYPE_VECTOR_SUBPARTS (type);
5194
        if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5195
          {
5196
            enum machine_mode mode = GET_MODE (target);
5197
 
5198
            icode = (int) vec_init_optab->handlers[mode].insn_code;
5199
            if (icode != CODE_FOR_nothing)
5200
              {
5201
                unsigned int i;
5202
 
5203
                vector = rtvec_alloc (n_elts);
5204
                for (i = 0; i < n_elts; i++)
5205
                  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5206
              }
5207
          }
5208
 
5209
        /* If the constructor has fewer elements than the vector,
5210
           clear the whole vector first.  Similarly if this is a static
5211
           constructor of a non-BLKmode object.  */
5212
        if (cleared)
5213
          need_to_clear = 0;
5214
        else if (REG_P (target) && TREE_STATIC (exp))
5215
          need_to_clear = 1;
5216
        else
5217
          {
5218
            unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5219
            tree value;
5220
 
5221
            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5222
              {
5223
                int n_elts_here = tree_low_cst
5224
                  (int_const_binop (TRUNC_DIV_EXPR,
5225
                                    TYPE_SIZE (TREE_TYPE (value)),
5226
                                    TYPE_SIZE (elttype), 0), 1);
5227
 
5228
                count += n_elts_here;
5229
                if (mostly_zeros_p (value))
5230
                  zero_count += n_elts_here;
5231
              }
5232
 
5233
            /* Clear the entire vector first if there are any missing elements,
5234
               or if the incidence of zero elements is >= 75%.  */
5235
            need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5236
          }
5237
 
5238
        if (need_to_clear && size > 0 && !vector)
5239
          {
5240
            if (REG_P (target))
5241
              emit_move_insn (target,  CONST0_RTX (GET_MODE (target)));
5242
            else
5243
              clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5244
            cleared = 1;
5245
          }
5246
 
5247
        /* Inform later passes that the old value is dead.  */
5248
        if (!cleared && REG_P (target))
5249
          emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5250
 
5251
        /* Store each element of the constructor into the corresponding
5252
           element of TARGET, determined by counting the elements.  */
5253
        for (idx = 0, i = 0;
5254
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5255
             idx++, i += bitsize / elt_size)
5256
          {
5257
            HOST_WIDE_INT eltpos;
5258
            tree value = ce->value;
5259
 
5260
            bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5261
            if (cleared && initializer_zerop (value))
5262
              continue;
5263
 
5264
            if (ce->index)
5265
              eltpos = tree_low_cst (ce->index, 1);
5266
            else
5267
              eltpos = i;
5268
 
5269
            if (vector)
5270
              {
5271
                /* Vector CONSTRUCTORs should only be built from smaller
5272
                   vectors in the case of BLKmode vectors.  */
5273
                gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5274
                RTVEC_ELT (vector, eltpos)
5275
                  = expand_expr (value, NULL_RTX, VOIDmode, 0);
5276
              }
5277
            else
5278
              {
5279
                enum machine_mode value_mode =
5280
                  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5281
                  ? TYPE_MODE (TREE_TYPE (value))
5282
                  : eltmode;
5283
                bitpos = eltpos * elt_size;
5284
                store_constructor_field (target, bitsize, bitpos,
5285
                                         value_mode, value, type,
5286
                                         cleared, get_alias_set (elttype));
5287
              }
5288
          }
5289
 
5290
        if (vector)
5291
          emit_insn (GEN_FCN (icode)
5292
                     (target,
5293
                      gen_rtx_PARALLEL (GET_MODE (target), vector)));
5294
        break;
5295
      }
5296
 
5297
    default:
5298
      gcc_unreachable ();
5299
    }
5300
}
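
/* Illustrative sketch (not part of expr.c): the RANGE_EXPR handling above,
   using GNU C designated range initializers.  With the heuristics shown
   (unroll when the range covers at most two elements or at most 40 bytes),
   the first initializer below would typically be unrolled into individual
   stores, while the second is large enough to be emitted as a clear of the
   whole array followed by a run-time loop over the range.  */
static void
use_ranges (void)
{
  int small_range[8]  = { [2 ... 5]  = 7 };   /*  4 * 4 bytes <= 40: unroll */
  int large_range[64] = { [4 ... 59] = 7 };   /* 56 * 4 bytes  > 40: loop   */
  (void) small_range;
  (void) large_range;
}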
5301
 
5302
/* Store the value of EXP (an expression tree)
5303
   into a subfield of TARGET which has mode MODE and occupies
5304
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
5305
   If MODE is VOIDmode, it means that we are storing into a bit-field.
5306
 
5307
   Always return const0_rtx unless we have something particular to
5308
   return.
5309
 
5310
   TYPE is the type of the underlying object,
5311
 
5312
   ALIAS_SET is the alias set for the destination.  This value will
5313
   (in general) be different from that for TARGET, since TARGET is a
5314
   reference to the containing structure.  */
5315
 
5316
static rtx
5317
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5318
             enum machine_mode mode, tree exp, tree type, int alias_set)
5319
{
5320
  HOST_WIDE_INT width_mask = 0;
5321
 
5322
  if (TREE_CODE (exp) == ERROR_MARK)
5323
    return const0_rtx;
5324
 
5325
  /* If we have nothing to store, do nothing unless the expression has
5326
     side-effects.  */
5327
  if (bitsize == 0)
5328
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
5329
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5330
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5331
 
5332
  /* If we are storing into an unaligned field of an aligned union that is
5333
     in a register, we may have the mode of TARGET being an integer mode but
5334
     MODE == BLKmode.  In that case, get an aligned object whose size and
5335
     alignment are the same as TARGET and store TARGET into it (we can avoid
5336
     the store if the field being stored is the entire width of TARGET).  Then
5337
     call ourselves recursively to store the field into a BLKmode version of
5338
     that object.  Finally, load from the object into TARGET.  This is not
5339
     very efficient in general, but should only be slightly more expensive
5340
     than the otherwise-required unaligned accesses.  Perhaps this can be
5341
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
5342
     twice, once with emit_move_insn and once via store_field.  */
5343
 
5344
  if (mode == BLKmode
5345
      && (REG_P (target) || GET_CODE (target) == SUBREG))
5346
    {
5347
      rtx object = assign_temp (type, 0, 1, 1);
5348
      rtx blk_object = adjust_address (object, BLKmode, 0);
5349
 
5350
      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5351
        emit_move_insn (object, target);
5352
 
5353
      store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5354
 
5355
      emit_move_insn (target, object);
5356
 
5357
      /* We want to return the BLKmode version of the data.  */
5358
      return blk_object;
5359
    }
5360
 
5361
  if (GET_CODE (target) == CONCAT)
5362
    {
5363
      /* We're storing into a struct containing a single __complex.  */
5364
 
5365
      gcc_assert (!bitpos);
5366
      return store_expr (exp, target, 0);
5367
    }
5368
 
5369
  /* If the structure is in a register or if the component
5370
     is a bit field, we cannot use addressing to access it.
5371
     Use bit-field techniques or SUBREG to store in it.  */
5372
 
5373
  if (mode == VOIDmode
5374
      || (mode != BLKmode && ! direct_store[(int) mode]
5375
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5376
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5377
      || REG_P (target)
5378
      || GET_CODE (target) == SUBREG
5379
      /* If the field isn't aligned enough to store as an ordinary memref,
5380
         store it as a bit field.  */
5381
      || (mode != BLKmode
5382
          && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5383
                || bitpos % GET_MODE_ALIGNMENT (mode))
5384
               && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5385
              || (bitpos % BITS_PER_UNIT != 0)))
5386
      /* If the RHS and field are a constant size and the size of the
5387
         RHS isn't the same size as the bitfield, we must use bitfield
5388
         operations.  */
5389
      || (bitsize >= 0
5390
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5391
          && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5392
    {
5393
      rtx temp;
5394
 
5395
      /* If EXP is a NOP_EXPR of precision less than its mode, then that
5396
         implies a mask operation.  If the precision is the same size as
5397
         the field we're storing into, that mask is redundant.  This is
5398
         particularly common with bit field assignments generated by the
5399
         C front end.  */
5400
      if (TREE_CODE (exp) == NOP_EXPR)
5401
        {
5402
          tree type = TREE_TYPE (exp);
5403
          if (INTEGRAL_TYPE_P (type)
5404
              && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5405
              && bitsize == TYPE_PRECISION (type))
5406
            {
5407
              type = TREE_TYPE (TREE_OPERAND (exp, 0));
5408
              if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5409
                exp = TREE_OPERAND (exp, 0);
5410
            }
5411
        }
5412
 
5413
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5414
 
5415
      /* If BITSIZE is narrower than the size of the type of EXP
5416
         we will be narrowing TEMP.  Normally, what's wanted are the
5417
         low-order bits.  However, if EXP's type is a record and this is a
5418
         big-endian machine, we want the upper BITSIZE bits.  */
5419
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5420
          && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5421
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5422
        temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5423
                             size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5424
                                       - bitsize),
5425
                             NULL_RTX, 1);
5426
 
5427
      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5428
         MODE.  */
5429
      if (mode != VOIDmode && mode != BLKmode
5430
          && mode != TYPE_MODE (TREE_TYPE (exp)))
5431
        temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5432
 
5433
      /* If the modes of TARGET and TEMP are both BLKmode, both
5434
         must be in memory and BITPOS must be aligned on a byte
5435
         boundary.  If so, we simply do a block copy.  */
5436
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5437
        {
5438
          gcc_assert (MEM_P (target) && MEM_P (temp)
5439
                      && !(bitpos % BITS_PER_UNIT));
5440
 
5441
          target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5442
          emit_block_move (target, temp,
5443
                           GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5444
                                    / BITS_PER_UNIT),
5445
                           BLOCK_OP_NORMAL);
5446
 
5447
          return const0_rtx;
5448
        }
5449
 
5450
      /* Store the value in the bitfield.  */
5451
      store_bit_field (target, bitsize, bitpos, mode, temp);
5452
 
5453
      return const0_rtx;
5454
    }
5455
  else
5456
    {
5457
      /* Now build a reference to just the desired component.  */
5458
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5459
 
5460
      if (to_rtx == target)
5461
        to_rtx = copy_rtx (to_rtx);
5462
 
5463
      MEM_SET_IN_STRUCT_P (to_rtx, 1);
5464
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5465
        set_mem_alias_set (to_rtx, alias_set);
5466
 
5467
      return store_expr (exp, to_rtx, 0);
5468
    }
5469
}
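
/* Illustrative sketch (not part of expr.c): the NOP_EXPR special case above.
   For the assignment below, the C front end converts X to the 5-bit field
   type; that conversion implies masking X to 5 bits, but since the store
   writes exactly 5 bits anyway the mask is redundant and the conversion's
   operand can be stored directly.  */
struct packed_flags
{
  unsigned int mode : 5;
  unsigned int rest : 27;
};

static void
set_mode (struct packed_flags *f, unsigned int x)
{
  f->mode = x;   /* implicit narrowing to the field's 5-bit precision */
}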
5470
 
5471
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5472
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5473
   codes and find the ultimate containing object, which we return.
5474
 
5475
   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5476
   bit position, and *PUNSIGNEDP to the signedness of the field.
5477
   If the position of the field is variable, we store a tree
5478
   giving the variable offset (in units) in *POFFSET.
5479
   This offset is in addition to the bit position.
5480
   If the position is not variable, we store 0 in *POFFSET.
5481
 
5482
   If any of the extraction expressions is volatile,
5483
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
5484
 
5485
   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
5486
   is a mode that can be used to access the field.  In that case, *PBITSIZE
5487
   is redundant.
5488
 
5489
   If the field describes a variable-sized object, *PMODE is set to
5490
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
5491
   this case, but the address of the object can be found.
5492
 
5493
   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5494
   look through nodes that serve as markers of a greater alignment than
5495
   the one that can be deduced from the expression.  These nodes make it
5496
   possible for front-ends to prevent temporaries from being created by
5497
   the middle-end on alignment considerations.  For that purpose, the
5498
   normal operating mode at high-level is to always pass FALSE so that
5499
   the ultimate containing object is really returned; moreover, the
5500
   associated predicate handled_component_p will always return TRUE
5501
   on these nodes, thus indicating that they are essentially handled
5502
   by get_inner_reference.  TRUE should only be passed when the caller
5503
   is scanning the expression in order to build another representation
5504
   and specifically knows how to handle these nodes; as such, this is
5505
   the normal operating mode in the RTL expanders.  */
5506
 
5507
tree
5508
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5509
                     HOST_WIDE_INT *pbitpos, tree *poffset,
5510
                     enum machine_mode *pmode, int *punsignedp,
5511
                     int *pvolatilep, bool keep_aligning)
5512
{
5513
  tree size_tree = 0;
5514
  enum machine_mode mode = VOIDmode;
5515
  tree offset = size_zero_node;
5516
  tree bit_offset = bitsize_zero_node;
5517
  tree tem;
5518
 
5519
  /* First get the mode, signedness, and size.  We do this from just the
5520
     outermost expression.  */
5521
  if (TREE_CODE (exp) == COMPONENT_REF)
5522
    {
5523
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5524
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5525
        mode = DECL_MODE (TREE_OPERAND (exp, 1));
5526
 
5527
      *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5528
    }
5529
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
5530
    {
5531
      size_tree = TREE_OPERAND (exp, 1);
5532
      *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5533
    }
5534
  else
5535
    {
5536
      mode = TYPE_MODE (TREE_TYPE (exp));
5537
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5538
 
5539
      if (mode == BLKmode)
5540
        size_tree = TYPE_SIZE (TREE_TYPE (exp));
5541
      else
5542
        *pbitsize = GET_MODE_BITSIZE (mode);
5543
    }
5544
 
5545
  if (size_tree != 0)
5546
    {
5547
      if (! host_integerp (size_tree, 1))
5548
        mode = BLKmode, *pbitsize = -1;
5549
      else
5550
        *pbitsize = tree_low_cst (size_tree, 1);
5551
    }
5552
 
5553
  /* Compute cumulative bit-offset for nested component-refs and array-refs,
5554
     and find the ultimate containing object.  */
5555
  while (1)
5556
    {
5557
      switch (TREE_CODE (exp))
5558
        {
5559
        case BIT_FIELD_REF:
5560
          bit_offset = size_binop (PLUS_EXPR, bit_offset,
5561
                                   TREE_OPERAND (exp, 2));
5562
          break;
5563
 
5564
        case COMPONENT_REF:
5565
          {
5566
            tree field = TREE_OPERAND (exp, 1);
5567
            tree this_offset = component_ref_field_offset (exp);
5568
 
5569
            /* If this field hasn't been filled in yet, don't go past it.
5570
               This should only happen when folding expressions made during
5571
               type construction.  */
5572
            if (this_offset == 0)
5573
              break;
5574
 
5575
            offset = size_binop (PLUS_EXPR, offset, this_offset);
5576
            bit_offset = size_binop (PLUS_EXPR, bit_offset,
5577
                                     DECL_FIELD_BIT_OFFSET (field));
5578
 
5579
            /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
5580
          }
5581
          break;
5582
 
5583
        case ARRAY_REF:
5584
        case ARRAY_RANGE_REF:
5585
          {
5586
            tree index = TREE_OPERAND (exp, 1);
5587
            tree low_bound = array_ref_low_bound (exp);
5588
            tree unit_size = array_ref_element_size (exp);
5589
 
5590
            /* We assume all arrays have sizes that are a multiple of a byte.
5591
               First subtract the lower bound, if any, in the type of the
5592
               index, then convert to sizetype and multiply by the size of
5593
               the array element.  */
5594
            if (! integer_zerop (low_bound))
5595
              index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5596
                                   index, low_bound);
5597
 
5598
            offset = size_binop (PLUS_EXPR, offset,
5599
                                 size_binop (MULT_EXPR,
5600
                                             convert (sizetype, index),
5601
                                             unit_size));
5602
          }
5603
          break;
5604
 
5605
        case REALPART_EXPR:
5606
          break;
5607
 
5608
        case IMAGPART_EXPR:
5609
          bit_offset = size_binop (PLUS_EXPR, bit_offset,
5610
                                   bitsize_int (*pbitsize));
5611
          break;
5612
 
5613
        case VIEW_CONVERT_EXPR:
5614
          if (keep_aligning && STRICT_ALIGNMENT
5615
              && (TYPE_ALIGN (TREE_TYPE (exp))
5616
               > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5617
              && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5618
                  < BIGGEST_ALIGNMENT)
5619
              && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5620
                  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5621
            goto done;
5622
          break;
5623
 
5624
        default:
5625
          goto done;
5626
        }
5627
 
5628
      /* If any reference in the chain is volatile, the effect is volatile.  */
5629
      if (TREE_THIS_VOLATILE (exp))
5630
        *pvolatilep = 1;
5631
 
5632
      exp = TREE_OPERAND (exp, 0);
5633
    }
5634
 done:
5635
 
5636
  /* If OFFSET is constant, see if we can return the whole thing as a
5637
     constant bit position.  Otherwise, split it up.  */
5638
  if (host_integerp (offset, 0)
5639
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5640
                                 bitsize_unit_node))
5641
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5642
      && host_integerp (tem, 0))
5643
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5644
  else
5645
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5646
 
5647
  *pmode = mode;
5648
  return exp;
5649
}
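
/* Illustrative sketch (not part of expr.c): what the walk above reports for
   a nested reference.  For the load below, get_inner_reference would peel
   the COMPONENT_REF and ARRAY_REF and return the object '*p', with roughly:
     *pbitsize  - width of 'lo' (typically 32 bits)
     *pbitpos   - the constant part of the bit offset (0 for 'lo' here)
     *poffset   - the variable part, (sizetype) i * sizeof (struct elem)
     *pmode     - the mode of 'lo', or VOIDmode for a bit-field.  */
struct elem { int lo; int hi; };
struct vec  { struct elem v[16]; };

static int
get_lo (struct vec *p, int i)
{
  return p->v[i].lo;
}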
5650
 
5651
/* Return a tree of sizetype representing the size, in bytes, of the element
5652
   of EXP, an ARRAY_REF.  */
5653
 
5654
tree
5655
array_ref_element_size (tree exp)
5656
{
5657
  tree aligned_size = TREE_OPERAND (exp, 3);
5658
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5659
 
5660
  /* If a size was specified in the ARRAY_REF, it's the size measured
5661
     in alignment units of the element type.  So multiply by that value.  */
5662
  if (aligned_size)
5663
    {
5664
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5665
         sizetype from another type of the same width and signedness.  */
5666
      if (TREE_TYPE (aligned_size) != sizetype)
5667
        aligned_size = fold_convert (sizetype, aligned_size);
5668
      return size_binop (MULT_EXPR, aligned_size,
5669
                         size_int (TYPE_ALIGN_UNIT (elmt_type)));
5670
    }
5671
 
5672
  /* Otherwise, take the size from that of the element type.  Substitute
5673
     any PLACEHOLDER_EXPR that we have.  */
5674
  else
5675
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5676
}
5677
 
5678
/* Return a tree representing the lower bound of the array mentioned in
5679
   EXP, an ARRAY_REF.  */
5680
 
5681
tree
5682
array_ref_low_bound (tree exp)
5683
{
5684
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5685
 
5686
  /* If a lower bound is specified in EXP, use it.  */
5687
  if (TREE_OPERAND (exp, 2))
5688
    return TREE_OPERAND (exp, 2);
5689
 
5690
  /* Otherwise, if there is a domain type and it has a lower bound, use it,
5691
     substituting for a PLACEHOLDER_EXPR as needed.  */
5692
  if (domain_type && TYPE_MIN_VALUE (domain_type))
5693
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5694
 
5695
  /* Otherwise, return a zero of the appropriate type.  */
5696
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5697
}
5698
 
5699
/* Return a tree representing the upper bound of the array mentioned in
5700
   EXP, an ARRAY_REF.  */
5701
 
5702
tree
5703
array_ref_up_bound (tree exp)
5704
{
5705
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5706
 
5707
  /* If there is a domain type and it has an upper bound, use it, substituting
5708
     for a PLACEHOLDER_EXPR as needed.  */
5709
  if (domain_type && TYPE_MAX_VALUE (domain_type))
5710
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5711
 
5712
  /* Otherwise fail.  */
5713
  return NULL_TREE;
5714
}
5715
 
5716
/* Return a tree representing the offset, in bytes, of the field referenced
5717
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */
5718
 
5719
tree
5720
component_ref_field_offset (tree exp)
5721
{
5722
  tree aligned_offset = TREE_OPERAND (exp, 2);
5723
  tree field = TREE_OPERAND (exp, 1);
5724
 
5725
  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5726
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
5727
     value.  */
5728
  if (aligned_offset)
5729
    {
5730
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5731
         sizetype from another type of the same width and signedness.  */
5732
      if (TREE_TYPE (aligned_offset) != sizetype)
5733
        aligned_offset = fold_convert (sizetype, aligned_offset);
5734
      return size_binop (MULT_EXPR, aligned_offset,
5735
                         size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5736
    }
5737
 
5738
  /* Otherwise, take the offset from that of the field.  Substitute
5739
     any PLACEHOLDER_EXPR that we have.  */
5740
  else
5741
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5742
}
5743
 
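/* A minimal usage sketch (hypothetical helper, not taken from GCC):
   combine component_ref_field_offset with DECL_FIELD_BIT_OFFSET to
   obtain the full constant byte offset of the field referenced by the
   COMPONENT_REF node REF.  Returns false when the offset is not a
   compile-time constant.  */

static bool
component_ref_constant_byte_offset (tree ref, HOST_WIDE_INT *byte_off)
{
  tree field = TREE_OPERAND (ref, 1);
  tree offset = component_ref_field_offset (ref);

  if (!host_integerp (offset, 1)
      || !host_integerp (DECL_FIELD_BIT_OFFSET (field), 1))
    return false;

  *byte_off = (tree_low_cst (offset, 1)
               + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                 / BITS_PER_UNIT);
  return true;
}
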
5744
/* Return 1 if T is an expression that get_inner_reference handles.  */
5745
 
5746
int
5747
handled_component_p (tree t)
5748
{
5749
  switch (TREE_CODE (t))
5750
    {
5751
    case BIT_FIELD_REF:
5752
    case COMPONENT_REF:
5753
    case ARRAY_REF:
5754
    case ARRAY_RANGE_REF:
5755
    case VIEW_CONVERT_EXPR:
5756
    case REALPART_EXPR:
5757
    case IMAGPART_EXPR:
5758
      return 1;
5759
 
5760
    default:
5761
      return 0;
5762
    }
5763
}
5764
 
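/* A minimal usage sketch (hypothetical helper, not taken from GCC):
   every code accepted by handled_component_p keeps the containing
   object in operand 0, so the base object of a reference chain can be
   found by walking down until the predicate fails.  */

static tree
strip_handled_components (tree t)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);
  return t;
}
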
5765
/* Given an rtx VALUE that may contain additions and multiplications, return
5766
   an equivalent value that just refers to a register, memory, or constant.
5767
   This is done by generating instructions to perform the arithmetic and
5768
   returning a pseudo-register containing the value.
5769
 
5770
   The returned value may be a REG, SUBREG, MEM or constant.  */
5771
 
5772
rtx
5773
force_operand (rtx value, rtx target)
5774
{
5775
  rtx op1, op2;
5776
  /* Use subtarget as the target for operand 0 of a binary operation.  */
5777
  rtx subtarget = get_subtarget (target);
5778
  enum rtx_code code = GET_CODE (value);
5779
 
5780
  /* Check for subreg applied to an expression produced by the loop optimizer.  */
5781
  if (code == SUBREG
5782
      && !REG_P (SUBREG_REG (value))
5783
      && !MEM_P (SUBREG_REG (value)))
5784
    {
5785
      value = simplify_gen_subreg (GET_MODE (value),
5786
                                   force_reg (GET_MODE (SUBREG_REG (value)),
5787
                                              force_operand (SUBREG_REG (value),
5788
                                                             NULL_RTX)),
5789
                                   GET_MODE (SUBREG_REG (value)),
5790
                                   SUBREG_BYTE (value));
5791
      code = GET_CODE (value);
5792
    }
5793
 
5794
  /* Check for a PIC address load.  */
5795
  if ((code == PLUS || code == MINUS)
5796
      && XEXP (value, 0) == pic_offset_table_rtx
5797
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5798
          || GET_CODE (XEXP (value, 1)) == LABEL_REF
5799
          || GET_CODE (XEXP (value, 1)) == CONST))
5800
    {
5801
      if (!subtarget)
5802
        subtarget = gen_reg_rtx (GET_MODE (value));
5803
      emit_move_insn (subtarget, value);
5804
      return subtarget;
5805
    }
5806
 
5807
  if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5808
    {
5809
      if (!target)
5810
        target = gen_reg_rtx (GET_MODE (value));
5811
      convert_move (target, force_operand (XEXP (value, 0), NULL),
5812
                    code == ZERO_EXTEND);
5813
      return target;
5814
    }
5815
 
5816
  if (ARITHMETIC_P (value))
5817
    {
5818
      op2 = XEXP (value, 1);
5819
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5820
        subtarget = 0;
5821
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
5822
        {
5823
          code = PLUS;
5824
          op2 = negate_rtx (GET_MODE (value), op2);
5825
        }
5826
 
5827
      /* Check for an addition with OP2 a constant integer and our first
5828
         operand a PLUS of a virtual register and something else.  In that
5829
         case, we want to emit the sum of the virtual register and the
5830
         constant first and then add the other value.  This allows virtual
5831
         register instantiation to simply modify the constant rather than
5832
         creating another one around this addition.  */
5833
      if (code == PLUS && GET_CODE (op2) == CONST_INT
5834
          && GET_CODE (XEXP (value, 0)) == PLUS
5835
          && REG_P (XEXP (XEXP (value, 0), 0))
5836
          && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5837
          && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5838
        {
5839
          rtx temp = expand_simple_binop (GET_MODE (value), code,
5840
                                          XEXP (XEXP (value, 0), 0), op2,
5841
                                          subtarget, 0, OPTAB_LIB_WIDEN);
5842
          return expand_simple_binop (GET_MODE (value), code, temp,
5843
                                      force_operand (XEXP (XEXP (value,
5844
                                                                 0), 1), 0),
5845
                                      target, 0, OPTAB_LIB_WIDEN);
5846
        }
5847
 
5848
      op1 = force_operand (XEXP (value, 0), subtarget);
5849
      op2 = force_operand (op2, NULL_RTX);
5850
      switch (code)
5851
        {
5852
        case MULT:
5853
          return expand_mult (GET_MODE (value), op1, op2, target, 1);
5854
        case DIV:
5855
          if (!INTEGRAL_MODE_P (GET_MODE (value)))
5856
            return expand_simple_binop (GET_MODE (value), code, op1, op2,
5857
                                        target, 1, OPTAB_LIB_WIDEN);
5858
          else
5859
            return expand_divmod (0,
5860
                                  FLOAT_MODE_P (GET_MODE (value))
5861
                                  ? RDIV_EXPR : TRUNC_DIV_EXPR,
5862
                                  GET_MODE (value), op1, op2, target, 0);
5863
          break;
5864
        case MOD:
5865
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5866
                                target, 0);
5867
          break;
5868
        case UDIV:
5869
          return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5870
                                target, 1);
5871
          break;
5872
        case UMOD:
5873
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5874
                                target, 1);
5875
          break;
5876
        case ASHIFTRT:
5877
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
5878
                                      target, 0, OPTAB_LIB_WIDEN);
5879
          break;
5880
        default:
5881
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
5882
                                      target, 1, OPTAB_LIB_WIDEN);
5883
        }
5884
    }
5885
  if (UNARY_P (value))
5886
    {
5887
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
5888
      return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5889
    }
5890
 
5891
#ifdef INSN_SCHEDULING
5892
  /* On machines that have insn scheduling, we want all memory references to be
5893
     explicit, so we need to deal with such paradoxical SUBREGs.  */
5894
  if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5895
      && (GET_MODE_SIZE (GET_MODE (value))
5896
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5897
    value
5898
      = simplify_gen_subreg (GET_MODE (value),
5899
                             force_reg (GET_MODE (SUBREG_REG (value)),
5900
                                        force_operand (SUBREG_REG (value),
5901
                                                       NULL_RTX)),
5902
                             GET_MODE (SUBREG_REG (value)),
5903
                             SUBREG_BYTE (value));
5904
#endif
5905
 
5906
  return value;
5907
}
5908
 
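/* A minimal usage sketch (hypothetical helper, not taken from GCC):
   an address expanded with EXPAND_SUM may come back as a bare
   (PLUS ...) / (MULT ...) nest; force_operand emits the arithmetic
   and returns a REG, MEM or constant that ordinary insn operands and
   memory_address can accept.  */

static rtx
expand_and_force_address (tree addr_expr)
{
  rtx addr = expand_expr (addr_expr, NULL_RTX, Pmode, EXPAND_SUM);
  return force_operand (addr, NULL_RTX);
}
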
5909
/* Subroutine of expand_expr: return nonzero iff there is no way that
5910
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
5911
   call is going to be used to determine whether we need a temporary
5912
   for EXP, as opposed to a recursive call to this function.
5913
 
5914
   It is always safe for this routine to return zero since it merely
5915
   searches for optimization opportunities.  */
5916
 
5917
int
5918
safe_from_p (rtx x, tree exp, int top_p)
5919
{
5920
  rtx exp_rtl = 0;
5921
  int i, nops;
5922
 
5923
  if (x == 0
5924
      /* If EXP has varying size, we MUST use a target since we currently
5925
         have no way of allocating temporaries of variable size
5926
         (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5927
         So we assume here that something at a higher level has prevented a
5928
         clash.  This is somewhat bogus, but the best we can do.  Only
5929
         do this when X is BLKmode and when we are at the top level.  */
5930
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5931
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5932
          && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5933
              || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5934
              || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5935
              != INTEGER_CST)
5936
          && GET_MODE (x) == BLKmode)
5937
      /* If X is in the outgoing argument area, it is always safe.  */
5938
      || (MEM_P (x)
5939
          && (XEXP (x, 0) == virtual_outgoing_args_rtx
5940
              || (GET_CODE (XEXP (x, 0)) == PLUS
5941
                  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5942
    return 1;
5943
 
5944
  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5945
     find the underlying pseudo.  */
5946
  if (GET_CODE (x) == SUBREG)
5947
    {
5948
      x = SUBREG_REG (x);
5949
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5950
        return 0;
5951
    }
5952
 
5953
  /* Now look at our tree code and possibly recurse.  */
5954
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5955
    {
5956
    case tcc_declaration:
5957
      exp_rtl = DECL_RTL_IF_SET (exp);
5958
      break;
5959
 
5960
    case tcc_constant:
5961
      return 1;
5962
 
5963
    case tcc_exceptional:
5964
      if (TREE_CODE (exp) == TREE_LIST)
5965
        {
5966
          while (1)
5967
            {
5968
              if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5969
                return 0;
5970
              exp = TREE_CHAIN (exp);
5971
              if (!exp)
5972
                return 1;
5973
              if (TREE_CODE (exp) != TREE_LIST)
5974
                return safe_from_p (x, exp, 0);
5975
            }
5976
        }
5977
      else if (TREE_CODE (exp) == CONSTRUCTOR)
5978
        {
5979
          constructor_elt *ce;
5980
          unsigned HOST_WIDE_INT idx;
5981
 
5982
          for (idx = 0;
5983
               VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5984
               idx++)
5985
            if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
5986
                || !safe_from_p (x, ce->value, 0))
5987
              return 0;
5988
          return 1;
5989
        }
5990
      else if (TREE_CODE (exp) == ERROR_MARK)
5991
        return 1;       /* An already-visited SAVE_EXPR? */
5992
      else
5993
        return 0;
5994
 
5995
    case tcc_statement:
5996
      /* The only case we look at here is the DECL_INITIAL inside a
5997
         DECL_EXPR.  */
5998
      return (TREE_CODE (exp) != DECL_EXPR
5999
              || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6000
              || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6001
              || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6002
 
6003
    case tcc_binary:
6004
    case tcc_comparison:
6005
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6006
        return 0;
6007
      /* Fall through.  */
6008
 
6009
    case tcc_unary:
6010
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6011
 
6012
    case tcc_expression:
6013
    case tcc_reference:
6014
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
6015
         the expression.  If it is set, we conflict iff we are that rtx or
6016
         both are in memory.  Otherwise, we check all operands of the
6017
         expression recursively.  */
6018
 
6019
      switch (TREE_CODE (exp))
6020
        {
6021
        case ADDR_EXPR:
6022
          /* If the operand is static or we are static, we can't conflict.
6023
             Likewise if we don't conflict with the operand at all.  */
6024
          if (staticp (TREE_OPERAND (exp, 0))
6025
              || TREE_STATIC (exp)
6026
              || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6027
            return 1;
6028
 
6029
          /* Otherwise, the only way this can conflict is if we are taking
6030
             the address of a DECL whose address is part of X, which is
6031
             very rare.  */
6032
          exp = TREE_OPERAND (exp, 0);
6033
          if (DECL_P (exp))
6034
            {
6035
              if (!DECL_RTL_SET_P (exp)
6036
                  || !MEM_P (DECL_RTL (exp)))
6037
                return 0;
6038
              else
6039
                exp_rtl = XEXP (DECL_RTL (exp), 0);
6040
            }
6041
          break;
6042
 
6043
        case MISALIGNED_INDIRECT_REF:
6044
        case ALIGN_INDIRECT_REF:
6045
        case INDIRECT_REF:
6046
          if (MEM_P (x)
6047
              && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6048
                                        get_alias_set (exp)))
6049
            return 0;
6050
          break;
6051
 
6052
        case CALL_EXPR:
6053
          /* Assume that the call will clobber all hard registers and
6054
             all of memory.  */
6055
          if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6056
              || MEM_P (x))
6057
            return 0;
6058
          break;
6059
 
6060
        case WITH_CLEANUP_EXPR:
6061
        case CLEANUP_POINT_EXPR:
6062
          /* Lowered by gimplify.c.  */
6063
          gcc_unreachable ();
6064
 
6065
        case SAVE_EXPR:
6066
          return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6067
 
6068
        default:
6069
          break;
6070
        }
6071
 
6072
      /* If we have an rtx, we do not need to scan our operands.  */
6073
      if (exp_rtl)
6074
        break;
6075
 
6076
      nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6077
      for (i = 0; i < nops; i++)
6078
        if (TREE_OPERAND (exp, i) != 0
6079
            && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6080
          return 0;
6081
 
6082
      /* If this is a language-specific tree code, it may require
6083
         special handling.  */
6084
      if ((unsigned int) TREE_CODE (exp)
6085
          >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6086
          && !lang_hooks.safe_from_p (x, exp))
6087
        return 0;
6088
      break;
6089
 
6090
    case tcc_type:
6091
      /* Should never get a type here.  */
6092
      gcc_unreachable ();
6093
    }
6094
 
6095
  /* If we have an rtl, find any enclosed object.  Then see if we conflict
6096
     with it.  */
6097
  if (exp_rtl)
6098
    {
6099
      if (GET_CODE (exp_rtl) == SUBREG)
6100
        {
6101
          exp_rtl = SUBREG_REG (exp_rtl);
6102
          if (REG_P (exp_rtl)
6103
              && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6104
            return 0;
6105
        }
6106
 
6107
      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
6108
         are memory and they conflict.  */
6109
      return ! (rtx_equal_p (x, exp_rtl)
6110
                || (MEM_P (x) && MEM_P (exp_rtl)
6111
                    && true_dependence (exp_rtl, VOIDmode, x,
6112
                                        rtx_addr_varies_p)));
6113
    }
6114
 
6115
  /* If we reach here, it is safe.  */
6116
  return 1;
6117
}
6118
 
6119
 
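/* A minimal usage sketch (hypothetical helper, not taken from GCC):
   reuse TARGET for one operand only when expanding the other operand
   cannot read or clobber it; otherwise return NULL_RTX so expand_expr
   picks a fresh pseudo.  This mirrors the check at the top of
   expand_operands below.  */

static rtx
choose_safe_target (rtx target, tree other_operand)
{
  return safe_from_p (target, other_operand, 1) ? target : NULL_RTX;
}
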
6120
/* Return the highest power of two that EXP is known to be a multiple of.
6121
   This is used in updating alignment of MEMs in array references.  */
6122
 
6123
unsigned HOST_WIDE_INT
6124
highest_pow2_factor (tree exp)
6125
{
6126
  unsigned HOST_WIDE_INT c0, c1;
6127
 
6128
  switch (TREE_CODE (exp))
6129
    {
6130
    case INTEGER_CST:
6131
      /* We can find the lowest bit that's a one.  If the low
6132
         HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6133
         We need to handle this case since we can find it in a COND_EXPR,
6134
         a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
6135
         erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6136
         later ICE.  */
6137
      if (TREE_CONSTANT_OVERFLOW (exp))
6138
        return BIGGEST_ALIGNMENT;
6139
      else
6140
        {
6141
          /* Note: tree_low_cst is intentionally not used here,
6142
             we don't care about the upper bits.  */
6143
          c0 = TREE_INT_CST_LOW (exp);
6144
          c0 &= -c0;
6145
          return c0 ? c0 : BIGGEST_ALIGNMENT;
6146
        }
6147
      break;
6148
 
6149
    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
6150
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6151
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6152
      return MIN (c0, c1);
6153
 
6154
    case MULT_EXPR:
6155
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6156
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6157
      return c0 * c1;
6158
 
6159
    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
6160
    case CEIL_DIV_EXPR:
6161
      if (integer_pow2p (TREE_OPERAND (exp, 1))
6162
          && host_integerp (TREE_OPERAND (exp, 1), 1))
6163
        {
6164
          c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6165
          c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6166
          return MAX (1, c0 / c1);
6167
        }
6168
      break;
6169
 
6170
    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
6171
    case SAVE_EXPR:
6172
      return highest_pow2_factor (TREE_OPERAND (exp, 0));
6173
 
6174
    case COMPOUND_EXPR:
6175
      return highest_pow2_factor (TREE_OPERAND (exp, 1));
6176
 
6177
    case COND_EXPR:
6178
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6179
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6180
      return MIN (c0, c1);
6181
 
6182
    default:
6183
      break;
6184
    }
6185
 
6186
  return 1;
6187
}
6188
 
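/* A minimal usage sketch (hypothetical helper, not taken from GCC):
   for a byte offset such as i * 12 + 8 the factors are 4 (from the
   MULT_EXPR, 1 * 4) and 8 (from the constant), and the PLUS_EXPR
   keeps the minimum, so the offset is known to be a multiple of
   4 bytes.  A caller could use that to bound the alignment recorded
   on a MEM whose address is displaced by such an offset.  */

static void
limit_mem_align_by_offset (rtx mem, tree byte_offset)
{
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (byte_offset);

  /* Cap the factor so the conversion to bits below cannot overflow.  */
  if (factor > (unsigned HOST_WIDE_INT) BIGGEST_ALIGNMENT)
    factor = BIGGEST_ALIGNMENT;
  set_mem_align (mem, MIN (MEM_ALIGN (mem),
                           (unsigned int) (factor * BITS_PER_UNIT)));
}
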
6189
/* Similar, except that the alignment requirements of TARGET are
6190
   taken into account.  Assume it is at least as aligned as its
6191
   type, unless it is a COMPONENT_REF in which case the layout of
6192
   the structure gives the alignment.  */
6193
 
6194
static unsigned HOST_WIDE_INT
6195
highest_pow2_factor_for_target (tree target, tree exp)
6196
{
6197
  unsigned HOST_WIDE_INT target_align, factor;
6198
 
6199
  factor = highest_pow2_factor (exp);
6200
  if (TREE_CODE (target) == COMPONENT_REF)
6201
    target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6202
  else
6203
    target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6204
  return MAX (factor, target_align);
6205
}
6206
 
6207
/* Expands variable VAR.  */
6208
 
6209
void
6210
expand_var (tree var)
6211
{
6212
  if (DECL_EXTERNAL (var))
6213
    return;
6214
 
6215
  if (TREE_STATIC (var))
6216
    /* If this is an inlined copy of a static local variable,
6217
       look up the original decl.  */
6218
    var = DECL_ORIGIN (var);
6219
 
6220
  if (TREE_STATIC (var)
6221
      ? !TREE_ASM_WRITTEN (var)
6222
      : !DECL_RTL_SET_P (var))
6223
    {
6224
      if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6225
        /* Should be ignored.  */;
6226
      else if (lang_hooks.expand_decl (var))
6227
        /* OK.  */;
6228
      else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6229
        expand_decl (var);
6230
      else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6231
        rest_of_decl_compilation (var, 0, 0);
6232
      else
6233
        /* No expansion needed.  */
6234
        gcc_assert (TREE_CODE (var) == TYPE_DECL
6235
                    || TREE_CODE (var) == CONST_DECL
6236
                    || TREE_CODE (var) == FUNCTION_DECL
6237
                    || TREE_CODE (var) == LABEL_DECL);
6238
    }
6239
}
6240
 
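/* A minimal usage sketch (hypothetical helper, not taken from GCC):
   a caller holding a chain of local VAR_DECLs (for instance the
   BIND_EXPR_VARS of a block) could emit RTL for each of them by
   walking the chain and calling expand_var above.  */

static void
expand_var_chain (tree vars)
{
  for (; vars; vars = TREE_CHAIN (vars))
    expand_var (vars);
}
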
6241
/* Subroutine of expand_expr.  Expand the two operands of a binary
6242
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
6243
   The value may be stored in TARGET if TARGET is nonzero.  The
6244
   MODIFIER argument is as documented by expand_expr.  */
6245
 
6246
static void
6247
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6248
                 enum expand_modifier modifier)
6249
{
6250
  if (! safe_from_p (target, exp1, 1))
6251
    target = 0;
6252
  if (operand_equal_p (exp0, exp1, 0))
6253
    {
6254
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6255
      *op1 = copy_rtx (*op0);
6256
    }
6257
  else
6258
    {
6259
      /* If we need to preserve evaluation order, copy exp0 into its own
6260
         temporary variable so that it can't be clobbered by exp1.  */
6261
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6262
        exp0 = save_expr (exp0);
6263
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6264
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6265
    }
6266
}
6267
 
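/* A minimal usage sketch (hypothetical helper, not taken from GCC):
   a typical binary-operator expansion splits the tree into two rtx
   operands with expand_operands and then hands them to
   expand_simple_binop; MODE, TARGET and UNSIGNEDP would come from the
   caller, as in the tcc_binary cases of expand_expr_real_1 below.  */

static rtx
expand_binary_tree_op (enum rtx_code code, tree exp, rtx target,
                       enum machine_mode mode, int unsignedp)
{
  rtx op0, op1;

  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                   target, &op0, &op1, EXPAND_NORMAL);
  return expand_simple_binop (mode, code, op0, op1, target, unsignedp,
                              OPTAB_LIB_WIDEN);
}
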
6268
 
6269
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
6270
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
6271
 
6272
static rtx
6273
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6274
                         enum expand_modifier modifier)
6275
{
6276
  rtx result, subtarget;
6277
  tree inner, offset;
6278
  HOST_WIDE_INT bitsize, bitpos;
6279
  int volatilep, unsignedp;
6280
  enum machine_mode mode1;
6281
 
6282
  /* If we are taking the address of a constant and are at the top level,
6283
     we have to use output_constant_def since we can't call force_const_mem
6284
     at top level.  */
6285
  /* ??? This should be considered a front-end bug.  We should not be
6286
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
6287
     exception here is STRING_CST.  */
6288
  if (TREE_CODE (exp) == CONSTRUCTOR
6289
      || CONSTANT_CLASS_P (exp))
6290
    return XEXP (output_constant_def (exp, 0), 0);
6291
 
6292
  /* Everything must be something allowed by is_gimple_addressable.  */
6293
  switch (TREE_CODE (exp))
6294
    {
6295
    case INDIRECT_REF:
6296
      /* This case will happen via recursion for &a->b.  */
6297
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
6298
 
6299
    case CONST_DECL:
6300
      /* Recurse and make the output_constant_def clause above handle this.  */
6301
      return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6302
                                      tmode, modifier);
6303
 
6304
    case REALPART_EXPR:
6305
      /* The real part of the complex number is always first, therefore
6306
         the address is the same as the address of the parent object.  */
6307
      offset = 0;
6308
      bitpos = 0;
6309
      inner = TREE_OPERAND (exp, 0);
6310
      break;
6311
 
6312
    case IMAGPART_EXPR:
6313
      /* The imaginary part of the complex number is always second.
6314
         The expression is therefore always offset by the size of the
6315
         scalar type.  */
6316
      offset = 0;
6317
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6318
      inner = TREE_OPERAND (exp, 0);
6319
      break;
6320
 
6321
    default:
6322
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
6323
         expand_expr, as that can have various side effects; LABEL_DECLs for
6324
         example, may not have their DECL_RTL set yet.  Assume language
6325
         specific tree nodes can be expanded in some interesting way.  */
6326
      if (DECL_P (exp)
6327
          || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6328
        {
6329
          result = expand_expr (exp, target, tmode,
6330
                                modifier == EXPAND_INITIALIZER
6331
                                ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6332
 
6333
          /* If the DECL isn't in memory, then the DECL wasn't properly
6334
             marked TREE_ADDRESSABLE, which will be either a front-end
6335
             or a tree optimizer bug.  */
6336
          gcc_assert (MEM_P (result));
6337
          result = XEXP (result, 0);
6338
 
6339
          /* ??? Is this needed anymore?  */
6340
          if (DECL_P (exp) && !TREE_USED (exp) == 0)
6341
            {
6342
              assemble_external (exp);
6343
              TREE_USED (exp) = 1;
6344
            }
6345
 
6346
          if (modifier != EXPAND_INITIALIZER
6347
              && modifier != EXPAND_CONST_ADDRESS)
6348
            result = force_operand (result, target);
6349
          return result;
6350
        }
6351
 
6352
      /* Pass FALSE as the last argument to get_inner_reference although
6353
         we are expanding to RTL.  The rationale is that we know how to
6354
         handle "aligning nodes" here: we can just bypass them because
6355
         they won't change the final object whose address will be returned
6356
         (they actually exist only for that purpose).  */
6357
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6358
                                   &mode1, &unsignedp, &volatilep, false);
6359
      break;
6360
    }
6361
 
6362
  /* We must have made progress.  */
6363
  gcc_assert (inner != exp);
6364
 
6365
  subtarget = offset || bitpos ? NULL_RTX : target;
6366
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6367
 
6368
  if (offset)
6369
    {
6370
      rtx tmp;
6371
 
6372
      if (modifier != EXPAND_NORMAL)
6373
        result = force_operand (result, NULL);
6374
      tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6375
 
6376
      result = convert_memory_address (tmode, result);
6377
      tmp = convert_memory_address (tmode, tmp);
6378
 
6379
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6380
        result = gen_rtx_PLUS (tmode, result, tmp);
6381
      else
6382
        {
6383
          subtarget = bitpos ? NULL_RTX : target;
6384
          result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6385
                                        1, OPTAB_LIB_WIDEN);
6386
        }
6387
    }
6388
 
6389
  if (bitpos)
6390
    {
6391
      /* Someone beforehand should have rejected taking the address
6392
         of such an object.  */
6393
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6394
 
6395
      result = plus_constant (result, bitpos / BITS_PER_UNIT);
6396
      if (modifier < EXPAND_SUM)
6397
        result = force_operand (result, target);
6398
    }
6399
 
6400
  return result;
6401
}
6402
 
6403
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
6404
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
6405
 
6406
static rtx
6407
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6408
                       enum expand_modifier modifier)
6409
{
6410
  enum machine_mode rmode;
6411
  rtx result;
6412
 
6413
  /* Target mode of VOIDmode says "whatever's natural".  */
6414
  if (tmode == VOIDmode)
6415
    tmode = TYPE_MODE (TREE_TYPE (exp));
6416
 
6417
  /* We can get called with some Weird Things if the user does silliness
6418
     like "(short) &a".  In that case, convert_memory_address won't do
6419
     the right thing, so ignore the given target mode.  */
6420
  if (tmode != Pmode && tmode != ptr_mode)
6421
    tmode = Pmode;
6422
 
6423
  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6424
                                    tmode, modifier);
6425
 
6426
  /* Despite expand_expr claims concerning ignoring TMODE when not
6427
     strictly convenient, stuff breaks if we don't honor it.  Note
6428
     that combined with the above, we only do this for pointer modes.  */
6429
  rmode = GET_MODE (result);
6430
  if (rmode == VOIDmode)
6431
    rmode = tmode;
6432
  if (rmode != tmode)
6433
    result = convert_memory_address (tmode, result);
6434
 
6435
  return result;
6436
}
6437
 
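/* A minimal usage sketch (hypothetical helper, not taken from GCC):
   wrap an already-addressable object in an ADDR_EXPR and let the
   ADDR_EXPR case of expand_expr_real_1 route it through
   expand_expr_addr_expr.  OBJECT is assumed to have been marked
   TREE_ADDRESSABLE by the front end.  */

static rtx
expand_address_of (tree object)
{
  tree addr = build1 (ADDR_EXPR,
                      build_pointer_type (TREE_TYPE (object)), object);
  return expand_expr (addr, NULL_RTX, Pmode, EXPAND_NORMAL);
}
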
6438
 
6439
/* expand_expr: generate code for computing expression EXP.
6440
   An rtx for the computed value is returned.  The value is never null.
6441
   In the case of a void EXP, const0_rtx is returned.
6442
 
6443
   The value may be stored in TARGET if TARGET is nonzero.
6444
   TARGET is just a suggestion; callers must assume that
6445
   the rtx returned may not be the same as TARGET.
6446
 
6447
   If TARGET is CONST0_RTX, it means that the value will be ignored.
6448
 
6449
   If TMODE is not VOIDmode, it suggests generating the
6450
   result in mode TMODE.  But this is done only when convenient.
6451
   Otherwise, TMODE is ignored and the value generated in its natural mode.
6452
   TMODE is just a suggestion; callers must assume that
6453
   the rtx returned may not have mode TMODE.
6454
 
6455
   Note that TARGET may have neither TMODE nor MODE.  In that case, it
6456
   probably will not be used.
6457
 
6458
   If MODIFIER is EXPAND_SUM then when EXP is an addition
6459
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6460
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
6461
   products as above, or REG or MEM, or constant.
6462
   Ordinarily in such cases we would output mul or add instructions
6463
   and then return a pseudo reg containing the sum.
6464
 
6465
   EXPAND_INITIALIZER is much like EXPAND_SUM except that
6466
   it also marks a label as absolutely required (it can't be dead).
6467
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6468
   This is used for outputting expressions used in initializers.
6469
 
6470
   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6471
   with a constant address even if that address is not normally legitimate.
6472
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6473
 
6474
   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6475
   a call parameter.  Such targets require special care as we haven't yet
6476
   marked TARGET so that it's safe from being trashed by libcalls.  We
6477
   don't want to use TARGET for anything but the final result;
6478
   intermediate values must go elsewhere.  Additionally, calls to
6479
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6480
 
6481
   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6482
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6483
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
6484
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6485
   recursively.  */
6486
 
6487
static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6488
                               enum expand_modifier, rtx *);
6489
 
6490
rtx
6491
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6492
                  enum expand_modifier modifier, rtx *alt_rtl)
6493
{
6494
  int rn = -1;
6495
  rtx ret, last = NULL;
6496
 
6497
  /* Handle ERROR_MARK before anybody tries to access its type.  */
6498
  if (TREE_CODE (exp) == ERROR_MARK
6499
      || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6500
    {
6501
      ret = CONST0_RTX (tmode);
6502
      return ret ? ret : const0_rtx;
6503
    }
6504
 
6505
  if (flag_non_call_exceptions)
6506
    {
6507
      rn = lookup_stmt_eh_region (exp);
6508
      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
6509
      if (rn >= 0)
6510
        last = get_last_insn ();
6511
    }
6512
 
6513
  /* If this is an expression of some kind and it has an associated line
6514
     number, then emit the line number before expanding the expression.
6515
 
6516
     We need to save and restore the file and line information so that
6517
     errors discovered during expansion are emitted with the right
6518
     information.  It would be better if the diagnostic routines
6519
     used the file/line information embedded in the tree nodes rather
6520
     than globals.  */
6521
  if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6522
    {
6523
      location_t saved_location = input_location;
6524
      input_location = EXPR_LOCATION (exp);
6525
      emit_line_note (input_location);
6526
 
6527
      /* Record where the insns produced belong.  */
6528
      record_block_change (TREE_BLOCK (exp));
6529
 
6530
      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6531
 
6532
      input_location = saved_location;
6533
    }
6534
  else
6535
    {
6536
      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6537
    }
6538
 
6539
  /* If using non-call exceptions, mark all insns that may trap.
6540
     expand_call() will mark CALL_INSNs before we get to this code,
6541
     but it doesn't handle libcalls, and these may trap.  */
6542
  if (rn >= 0)
6543
    {
6544
      rtx insn;
6545
      for (insn = next_real_insn (last); insn;
6546
           insn = next_real_insn (insn))
6547
        {
6548
          if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6549
              /* If we want exceptions for non-call insns, any
6550
                 may_trap_p instruction may throw.  */
6551
              && GET_CODE (PATTERN (insn)) != CLOBBER
6552
              && GET_CODE (PATTERN (insn)) != USE
6553
              && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6554
            {
6555
              REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6556
                                                  REG_NOTES (insn));
6557
            }
6558
        }
6559
    }
6560
 
6561
  return ret;
6562
}
6563
 
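/* A minimal usage sketch (hypothetical helper, not taken from GCC):
   with EXPAND_SUM the value returned by expand_expr may be a bare
   (PLUS ...) nest rather than a pseudo, so an address obtained that
   way still has to be legitimized before it can be wrapped in a MEM,
   just as the INDIRECT_REF case of expand_expr_real_1 below does
   (real callers would also call set_mem_attributes on the result).  */

static rtx
expand_address_to_mem (tree addr, enum machine_mode mode)
{
  rtx op = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
  return gen_rtx_MEM (mode, memory_address (mode, op));
}
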
6564
static rtx
6565
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6566
                    enum expand_modifier modifier, rtx *alt_rtl)
6567
{
6568
  rtx op0, op1, temp;
6569
  tree type = TREE_TYPE (exp);
6570
  int unsignedp;
6571
  enum machine_mode mode;
6572
  enum tree_code code = TREE_CODE (exp);
6573
  optab this_optab;
6574
  rtx subtarget, original_target;
6575
  int ignore;
6576
  tree context;
6577
  bool reduce_bit_field = false;
6578
#define REDUCE_BIT_FIELD(expr)  (reduce_bit_field && !ignore              \
6579
                                 ? reduce_to_bit_field_precision ((expr), \
6580
                                                                  target, \
6581
                                                                  type)   \
6582
                                 : (expr))
6583
 
6584
  mode = TYPE_MODE (type);
6585
  unsignedp = TYPE_UNSIGNED (type);
6586
  if (lang_hooks.reduce_bit_field_operations
6587
      && TREE_CODE (type) == INTEGER_TYPE
6588
      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6589
    {
6590
      /* An operation in what may be a bit-field type needs the
6591
         result to be reduced to the precision of the bit-field type,
6592
         which is narrower than that of the type's mode.  */
6593
      reduce_bit_field = true;
6594
      if (modifier == EXPAND_STACK_PARM)
6595
        target = 0;
6596
    }
6597
 
6598
  /* Use subtarget as the target for operand 0 of a binary operation.  */
6599
  subtarget = get_subtarget (target);
6600
  original_target = target;
6601
  ignore = (target == const0_rtx
6602
            || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6603
                 || code == CONVERT_EXPR || code == COND_EXPR
6604
                 || code == VIEW_CONVERT_EXPR)
6605
                && TREE_CODE (type) == VOID_TYPE));
6606
 
6607
  /* If we are going to ignore this result, we need only do something
6608
     if there is a side-effect somewhere in the expression.  If there
6609
     is, short-circuit the most common cases here.  Note that we must
6610
     not call expand_expr with anything but const0_rtx in case this
6611
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
6612
 
6613
  if (ignore)
6614
    {
6615
      if (! TREE_SIDE_EFFECTS (exp))
6616
        return const0_rtx;
6617
 
6618
      /* Ensure we reference a volatile object even if value is ignored, but
6619
         don't do this if all we are doing is taking its address.  */
6620
      if (TREE_THIS_VOLATILE (exp)
6621
          && TREE_CODE (exp) != FUNCTION_DECL
6622
          && mode != VOIDmode && mode != BLKmode
6623
          && modifier != EXPAND_CONST_ADDRESS)
6624
        {
6625
          temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6626
          if (MEM_P (temp))
6627
            temp = copy_to_reg (temp);
6628
          return const0_rtx;
6629
        }
6630
 
6631
      if (TREE_CODE_CLASS (code) == tcc_unary
6632
          || code == COMPONENT_REF || code == INDIRECT_REF)
6633
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6634
                            modifier);
6635
 
6636
      else if (TREE_CODE_CLASS (code) == tcc_binary
6637
               || TREE_CODE_CLASS (code) == tcc_comparison
6638
               || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6639
        {
6640
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6641
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6642
          return const0_rtx;
6643
        }
6644
      else if (code == BIT_FIELD_REF)
6645
        {
6646
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6647
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6648
          expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6649
          return const0_rtx;
6650
        }
6651
 
6652
      target = 0;
6653
    }
6654
 
6655
 
6656
  switch (code)
6657
    {
6658
    case LABEL_DECL:
6659
      {
6660
        tree function = decl_function_context (exp);
6661
 
6662
        temp = label_rtx (exp);
6663
        temp = gen_rtx_LABEL_REF (Pmode, temp);
6664
 
6665
        if (function != current_function_decl
6666
            && function != 0)
6667
          LABEL_REF_NONLOCAL_P (temp) = 1;
6668
 
6669
        temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6670
        return temp;
6671
      }
6672
 
6673
    case SSA_NAME:
6674
      return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6675
                                 NULL);
6676
 
6677
    case PARM_DECL:
6678
    case VAR_DECL:
6679
      /* If a static var's type was incomplete when the decl was written,
6680
         but the type is complete now, lay out the decl now.  */
6681
      if (DECL_SIZE (exp) == 0
6682
          && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6683
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6684
        layout_decl (exp, 0);
6685
 
6686
      /* ... fall through ...  */
6687
 
6688
    case FUNCTION_DECL:
6689
    case RESULT_DECL:
6690
      gcc_assert (DECL_RTL (exp));
6691
 
6692
      /* Ensure variable marked as used even if it doesn't go through
6693
         a parser.  If it hasn't been used yet, write out an external
6694
         definition.  */
6695
      if (! TREE_USED (exp))
6696
        {
6697
          assemble_external (exp);
6698
          TREE_USED (exp) = 1;
6699
        }
6700
 
6701
      /* Show we haven't gotten RTL for this yet.  */
6702
      temp = 0;
6703
 
6704
      /* Variables inherited from containing functions should have
6705
         been lowered by this point.  */
6706
      context = decl_function_context (exp);
6707
      gcc_assert (!context
6708
                  || context == current_function_decl
6709
                  || TREE_STATIC (exp)
6710
                  /* ??? C++ creates functions that are not TREE_STATIC.  */
6711
                  || TREE_CODE (exp) == FUNCTION_DECL);
6712
 
6713
      /* This is the case of an array whose size is to be determined
6714
         from its initializer, while the initializer is still being parsed.
6715
         See expand_decl.  */
6716
 
6717
      if (MEM_P (DECL_RTL (exp))
6718
               && REG_P (XEXP (DECL_RTL (exp), 0)))
6719
        temp = validize_mem (DECL_RTL (exp));
6720
 
6721
      /* If DECL_RTL is memory, we are in the normal case and either
6722
         the address is not valid or it is not a register and -fforce-addr
6723
         is specified, get the address into a register.  */
6724
 
6725
      else if (MEM_P (DECL_RTL (exp))
6726
               && modifier != EXPAND_CONST_ADDRESS
6727
               && modifier != EXPAND_SUM
6728
               && modifier != EXPAND_INITIALIZER
6729
               && (! memory_address_p (DECL_MODE (exp),
6730
                                       XEXP (DECL_RTL (exp), 0))
6731
                   || (flag_force_addr
6732
                       && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6733
        {
6734
          if (alt_rtl)
6735
            *alt_rtl = DECL_RTL (exp);
6736
          temp = replace_equiv_address (DECL_RTL (exp),
6737
                                        copy_rtx (XEXP (DECL_RTL (exp), 0)));
6738
        }
6739
 
6740
      /* If we got something, return it.  But first, set the alignment
6741
         if the address is a register.  */
6742
      if (temp != 0)
6743
        {
6744
          if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6745
            mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6746
 
6747
          return temp;
6748
        }
6749
 
6750
      /* If the mode of DECL_RTL does not match that of the decl, it
6751
         must be a promoted value.  We return a SUBREG of the wanted mode,
6752
         but mark it so that we know that it was already extended.  */
6753
 
6754
      if (REG_P (DECL_RTL (exp))
6755
          && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6756
        {
6757
          enum machine_mode pmode;
6758
 
6759
          /* Get the signedness used for this variable.  Ensure we get the
6760
             same mode we got when the variable was declared.  */
6761
          pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6762
                                (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6763
          gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6764
 
6765
          temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6766
          SUBREG_PROMOTED_VAR_P (temp) = 1;
6767
          SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6768
          return temp;
6769
        }
6770
 
6771
      return DECL_RTL (exp);
6772
 
6773
    case INTEGER_CST:
6774
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
6775
                                 TREE_INT_CST_HIGH (exp), mode);
6776
 
6777
      /* ??? If overflow is set, fold will have done an incomplete job,
6778
         which can result in (plus xx (const_int 0)), which can get
6779
         simplified by validate_replace_rtx during virtual register
6780
         instantiation, which can result in unrecognizable insns.
6781
         Avoid this by forcing all overflows into registers.  */
6782
      if (TREE_CONSTANT_OVERFLOW (exp)
6783
          && modifier != EXPAND_INITIALIZER)
6784
        temp = force_reg (mode, temp);
6785
 
6786
      return temp;
6787
 
6788
    case VECTOR_CST:
6789
      if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6790
          || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6791
        return const_vector_from_tree (exp);
6792
      else
6793
        return expand_expr (build_constructor_from_list
6794
                            (TREE_TYPE (exp),
6795
                             TREE_VECTOR_CST_ELTS (exp)),
6796
                            ignore ? const0_rtx : target, tmode, modifier);
6797
 
6798
    case CONST_DECL:
6799
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6800
 
6801
    case REAL_CST:
6802
      /* If optimized, generate immediate CONST_DOUBLE
6803
         which will be turned into memory by reload if necessary.
6804
 
6805
         We used to force a register so that loop.c could see it.  But
6806
         this does not allow gen_* patterns to perform optimizations with
6807
         the constants.  It also produces two insns in cases like "x = 1.0;".
6808
         On most machines, floating-point constants are not permitted in
6809
         many insns, so we'd end up copying it to a register in any case.
6810
 
6811
         Now, we do the copying in expand_binop, if appropriate.  */
6812
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6813
                                           TYPE_MODE (TREE_TYPE (exp)));
6814
 
6815
    case COMPLEX_CST:
6816
      /* Handle evaluating a complex constant in a CONCAT target.  */
6817
      if (original_target && GET_CODE (original_target) == CONCAT)
6818
        {
6819
          enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6820
          rtx rtarg, itarg;
6821
 
6822
          rtarg = XEXP (original_target, 0);
6823
          itarg = XEXP (original_target, 1);
6824
 
6825
          /* Move the real and imaginary parts separately.  */
6826
          op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6827
          op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6828
 
6829
          if (op0 != rtarg)
6830
            emit_move_insn (rtarg, op0);
6831
          if (op1 != itarg)
6832
            emit_move_insn (itarg, op1);
6833
 
6834
          return original_target;
6835
        }
6836
 
6837
      /* ... fall through ...  */
6838
 
6839
    case STRING_CST:
6840
      temp = output_constant_def (exp, 1);
6841
 
6842
      /* temp contains a constant address.
6843
         On RISC machines where a constant address isn't valid,
6844
         make some insns to get that address into a register.  */
6845
      if (modifier != EXPAND_CONST_ADDRESS
6846
          && modifier != EXPAND_INITIALIZER
6847
          && modifier != EXPAND_SUM
6848
          && (! memory_address_p (mode, XEXP (temp, 0))
6849
              || flag_force_addr))
6850
        return replace_equiv_address (temp,
6851
                                      copy_rtx (XEXP (temp, 0)));
6852
      return temp;
6853
 
6854
    case SAVE_EXPR:
6855
      {
6856
        tree val = TREE_OPERAND (exp, 0);
6857
        rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6858
 
6859
        if (!SAVE_EXPR_RESOLVED_P (exp))
6860
          {
6861
            /* We can indeed still hit this case, typically via builtin
6862
               expanders calling save_expr immediately before expanding
6863
               something.  Assume this means that we only have to deal
6864
               with non-BLKmode values.  */
6865
            gcc_assert (GET_MODE (ret) != BLKmode);
6866
 
6867
            val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6868
            DECL_ARTIFICIAL (val) = 1;
6869
            DECL_IGNORED_P (val) = 1;
6870
            TREE_OPERAND (exp, 0) = val;
6871
            SAVE_EXPR_RESOLVED_P (exp) = 1;
6872
 
6873
            if (!CONSTANT_P (ret))
6874
              ret = copy_to_reg (ret);
6875
            SET_DECL_RTL (val, ret);
6876
          }
6877
 
6878
        return ret;
6879
      }
6880
 
6881
    case GOTO_EXPR:
6882
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6883
        expand_goto (TREE_OPERAND (exp, 0));
6884
      else
6885
        expand_computed_goto (TREE_OPERAND (exp, 0));
6886
      return const0_rtx;
6887
 
6888
    case CONSTRUCTOR:
6889
      /* If we don't need the result, just ensure we evaluate any
6890
         subexpressions.  */
6891
      if (ignore)
6892
        {
6893
          unsigned HOST_WIDE_INT idx;
6894
          tree value;
6895
 
6896
          FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6897
            expand_expr (value, const0_rtx, VOIDmode, 0);
6898
 
6899
          return const0_rtx;
6900
        }
6901
 
6902
      /* Try to avoid creating a temporary at all.  This is possible
6903
         if all of the initializer is zero.
6904
         FIXME: try to handle all [0..255] initializers we can handle
6905
         with memset.  */
6906
      else if (TREE_STATIC (exp)
6907
               && !TREE_ADDRESSABLE (exp)
6908
               && target != 0 && mode == BLKmode
6909
               && all_zeros_p (exp))
6910
        {
6911
          clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6912
          return target;
6913
        }
6914
 
6915
      /* All elts simple constants => refer to a constant in memory.  But
6916
         if this is a non-BLKmode mode, let it store a field at a time
6917
         since that should make a CONST_INT or CONST_DOUBLE when we
6918
         fold.  Likewise, if we have a target we can use, it is best to
6919
         store directly into the target unless the type is large enough
6920
         that memcpy will be used.  If we are making an initializer and
6921
         all operands are constant, put it in memory as well.
6922
 
6923
        FIXME: Avoid trying to fill vector constructors piece-meal.
6924
        Output them with output_constant_def below unless we're sure
6925
        they're zeros.  This should go away when vector initializers
6926
        are treated like VECTOR_CST instead of arrays.
6927
      */
6928
      else if ((TREE_STATIC (exp)
6929
                && ((mode == BLKmode
6930
                     && ! (target != 0 && safe_from_p (target, exp, 1)))
6931
                    || TREE_ADDRESSABLE (exp)
6932
                    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6933
                        && (! MOVE_BY_PIECES_P
6934
                            (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6935
                             TYPE_ALIGN (type)))
6936
                        && ! mostly_zeros_p (exp))))
6937
               || ((modifier == EXPAND_INITIALIZER
6938
                    || modifier == EXPAND_CONST_ADDRESS)
6939
                   && TREE_CONSTANT (exp)))
6940
        {
6941
          rtx constructor = output_constant_def (exp, 1);
6942
 
6943
          if (modifier != EXPAND_CONST_ADDRESS
6944
              && modifier != EXPAND_INITIALIZER
6945
              && modifier != EXPAND_SUM)
6946
            constructor = validize_mem (constructor);
6947
 
6948
          return constructor;
6949
        }
6950
      else
6951
        {
6952
          /* Handle calls that pass values in multiple non-contiguous
6953
             locations.  The Irix 6 ABI has examples of this.  */
6954
          if (target == 0 || ! safe_from_p (target, exp, 1)
6955
              || GET_CODE (target) == PARALLEL
6956
              || modifier == EXPAND_STACK_PARM)
6957
            target
6958
              = assign_temp (build_qualified_type (type,
6959
                                                   (TYPE_QUALS (type)
6960
                                                    | (TREE_READONLY (exp)
6961
                                                       * TYPE_QUAL_CONST))),
6962
                             0, TREE_ADDRESSABLE (exp), 1);
6963
 
6964
          store_constructor (exp, target, 0, int_expr_size (exp));
6965
          return target;
6966
        }
6967
 
6968
    case MISALIGNED_INDIRECT_REF:
6969
    case ALIGN_INDIRECT_REF:
6970
    case INDIRECT_REF:
6971
      {
6972
        tree exp1 = TREE_OPERAND (exp, 0);
6973
 
6974
        if (modifier != EXPAND_WRITE)
6975
          {
6976
            tree t;
6977
 
6978
            t = fold_read_from_constant_string (exp);
6979
            if (t)
6980
              return expand_expr (t, target, tmode, modifier);
6981
          }
6982
 
6983
        op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6984
        op0 = memory_address (mode, op0);
6985
 
6986
        if (code == ALIGN_INDIRECT_REF)
6987
          {
6988
            int align = TYPE_ALIGN_UNIT (type);
6989
            op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6990
            op0 = memory_address (mode, op0);
6991
          }
6992
 
6993
        temp = gen_rtx_MEM (mode, op0);
6994
 
6995
        set_mem_attributes (temp, exp, 0);
6996
 
6997
        /* Resolve the misalignment now, so that we don't have to remember
6998
           to resolve it later.  Of course, this only works for reads.  */
6999
        /* ??? When we get around to supporting writes, we'll have to handle
7000
           this in store_expr directly.  The vectorizer isn't generating
7001
           those yet, however.  */
7002
        if (code == MISALIGNED_INDIRECT_REF)
7003
          {
7004
            int icode;
7005
            rtx reg, insn;
7006
 
7007
            gcc_assert (modifier == EXPAND_NORMAL
7008
                        || modifier == EXPAND_STACK_PARM);
7009
 
7010
            /* The vectorizer should have already checked the mode.  */
7011
            icode = movmisalign_optab->handlers[mode].insn_code;
7012
            gcc_assert (icode != CODE_FOR_nothing);
7013
 
7014
            /* We've already validated the memory, and we're creating a
7015
               new pseudo destination.  The predicates really can't fail.  */
7016
            reg = gen_reg_rtx (mode);
7017
 
7018
            /* Nor can the insn generator.  */
7019
            insn = GEN_FCN (icode) (reg, temp);
7020
            emit_insn (insn);
7021
 
7022
            return reg;
7023
          }
7024
 
7025
        return temp;
7026
      }
7027
 
7028
    case TARGET_MEM_REF:
7029
      {
7030
        struct mem_address addr;
7031
 
7032
        get_address_description (exp, &addr);
7033
        op0 = addr_for_mem_ref (&addr, true);
7034
        op0 = memory_address (mode, op0);
7035
        temp = gen_rtx_MEM (mode, op0);
7036
        set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7037
      }
7038
      return temp;
7039
 
7040
    case ARRAY_REF:
7041
 
7042
      {
7043
        tree array = TREE_OPERAND (exp, 0);
7044
        tree index = TREE_OPERAND (exp, 1);
7045
 
7046
        /* Fold an expression like: "foo"[2].
7047
           This is not done in fold so it won't happen inside &.
7048
           Don't fold if this is for wide characters since it's too
7049
           difficult to do correctly and this is a very rare case.  */
7050
 
7051
        if (modifier != EXPAND_CONST_ADDRESS
7052
            && modifier != EXPAND_INITIALIZER
7053
            && modifier != EXPAND_MEMORY)
7054
          {
7055
            tree t = fold_read_from_constant_string (exp);
7056
 
7057
            if (t)
7058
              return expand_expr (t, target, tmode, modifier);
7059
          }
7060
 
7061
        /* If this is a constant index into a constant array,
7062
           just get the value from the array.  Handle both the cases when
7063
           we have an explicit constructor and when our operand is a variable
7064
           that was declared const.  */
7065
 
7066
        if (modifier != EXPAND_CONST_ADDRESS
7067
            && modifier != EXPAND_INITIALIZER
7068
            && modifier != EXPAND_MEMORY
7069
            && TREE_CODE (array) == CONSTRUCTOR
7070
            && ! TREE_SIDE_EFFECTS (array)
7071
            && TREE_CODE (index) == INTEGER_CST)
7072
          {
7073
            unsigned HOST_WIDE_INT ix;
7074
            tree field, value;
7075
 
7076
            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7077
                                      field, value)
7078
              if (tree_int_cst_equal (field, index))
7079
                {
7080
                  if (!TREE_SIDE_EFFECTS (value))
7081
                    return expand_expr (fold (value), target, tmode, modifier);
7082
                  break;
7083
                }
7084
          }
7085
 
7086
        else if (optimize >= 1
7087
                 && modifier != EXPAND_CONST_ADDRESS
7088
                 && modifier != EXPAND_INITIALIZER
7089
                 && modifier != EXPAND_MEMORY
7090
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7091
                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7092
                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7093
                 && targetm.binds_local_p (array))
7094
          {
7095
            if (TREE_CODE (index) == INTEGER_CST)
7096
              {
7097
                tree init = DECL_INITIAL (array);
7098
 
7099
                if (TREE_CODE (init) == CONSTRUCTOR)
7100
                  {
7101
                    unsigned HOST_WIDE_INT ix;
7102
                    tree field, value;
7103
 
7104
                    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7105
                                              field, value)
7106
                      if (tree_int_cst_equal (field, index))
7107
                        {
7108
                          if (!TREE_SIDE_EFFECTS (value))
7109
                            return expand_expr (fold (value), target, tmode,
7110
                                                modifier);
7111
                          break;
7112
                        }
7113
                  }
7114
                else if (TREE_CODE (init) == STRING_CST)
7115
                  {
7116
                    tree index1 = index;
7117
                    tree low_bound = array_ref_low_bound (exp);
7118
                    index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7119
 
7120
                    /* Optimize the special-case of a zero lower bound.
7121
 
7122
                       We convert the low_bound to sizetype to avoid some problems
7123
                       with constant folding.  (E.g. suppose the lower bound is 1,
7124
                       and its mode is QI.  Without the conversion, (ARRAY
7125
                       +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7126
                       +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
7127
 
7128
                    if (! integer_zerop (low_bound))
7129
                      index1 = size_diffop (index1, fold_convert (sizetype,
7130
                                                                  low_bound));
7131
 
7132
                    if (0 > compare_tree_int (index1,
7133
                                              TREE_STRING_LENGTH (init)))
7134
                      {
7135
                        tree type = TREE_TYPE (TREE_TYPE (init));
7136
                        enum machine_mode mode = TYPE_MODE (type);
7137
 
7138
                        if (GET_MODE_CLASS (mode) == MODE_INT
7139
                            && GET_MODE_SIZE (mode) == 1)
7140
                          return gen_int_mode (TREE_STRING_POINTER (init)
7141
                                               [TREE_INT_CST_LOW (index1)],
7142
                                               mode);
7143
                      }
7144
                  }
7145
              }
7146
          }
7147
      }
7148
      goto normal_inner_ref;
7149
 
7150
    case COMPONENT_REF:
7151
      /* If the operand is a CONSTRUCTOR, we can just extract the
7152
         appropriate field if it is present.  */
7153
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7154
        {
7155
          unsigned HOST_WIDE_INT idx;
7156
          tree field, value;
7157
 
7158
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7159
                                    idx, field, value)
7160
            if (field == TREE_OPERAND (exp, 1)
7161
                /* We can normally use the value of the field in the
7162
                   CONSTRUCTOR.  However, if this is a bitfield in
7163
                   an integral mode that we can fit in a HOST_WIDE_INT,
7164
                   we must mask only the number of bits in the bitfield,
7165
                   since this is done implicitly by the constructor.  If
7166
                   the bitfield does not meet either of those conditions,
7167
                   we can't do this optimization.  */
7168
                && (! DECL_BIT_FIELD (field)
7169
                    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7170
                        && (GET_MODE_BITSIZE (DECL_MODE (field))
7171
                            <= HOST_BITS_PER_WIDE_INT))))
7172
              {
7173
                if (DECL_BIT_FIELD (field)
7174
                    && modifier == EXPAND_STACK_PARM)
7175
                  target = 0;
7176
                op0 = expand_expr (value, target, tmode, modifier);
7177
                if (DECL_BIT_FIELD (field))
7178
                  {
7179
                    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7180
                    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
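                    /* Sketch of the adjustment below: for, say, a 3-bit
                       unsigned field the value is masked with (1 << 3) - 1;
                       for a 3-bit signed field it is shifted left by
                       (mode width - 3) and arithmetically back right so
                       that bit 2 is sign-extended.  */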
7181
 
7182
                    if (TYPE_UNSIGNED (TREE_TYPE (field)))
7183
                      {
7184
                        op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7185
                        op0 = expand_and (imode, op0, op1, target);
7186
                      }
7187
                    else
7188
                      {
7189
                        tree count
7190
                          = build_int_cst (NULL_TREE,
7191
                                           GET_MODE_BITSIZE (imode) - bitsize);
7192
 
7193
                        op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7194
                                            target, 0);
7195
                        op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7196
                                            target, 0);
7197
                      }
7198
                  }
7199
 
7200
                return op0;
7201
              }
7202
        }
7203
      goto normal_inner_ref;
7204
 
7205
    case BIT_FIELD_REF:
7206
    case ARRAY_RANGE_REF:
7207
    normal_inner_ref:
7208
      {
7209
        enum machine_mode mode1;
7210
        HOST_WIDE_INT bitsize, bitpos;
7211
        tree offset;
7212
        int volatilep = 0;
7213
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7214
                                        &mode1, &unsignedp, &volatilep, true);
7215
        rtx orig_op0;
7216
 
7217
        /* If we got back the original object, something is wrong.  Perhaps
7218
           we are evaluating an expression too early.  In any event, don't
7219
           infinitely recurse.  */
7220
        gcc_assert (tem != exp);
7221
 
7222
        /* If TEM's type is a union of variable size, pass TARGET to the inner
7223
           computation, since it will need a temporary and TARGET is known
7224
           to suffice.  This occurs in unchecked conversion in Ada.  */
7225
 
7226
        orig_op0 = op0
7227
          = expand_expr (tem,
7228
                         (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7229
                          && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7230
                              != INTEGER_CST)
7231
                          && modifier != EXPAND_STACK_PARM
7232
                          ? target : NULL_RTX),
7233
                         VOIDmode,
7234
                         (modifier == EXPAND_INITIALIZER
7235
                          || modifier == EXPAND_CONST_ADDRESS
7236
                          || modifier == EXPAND_STACK_PARM)
7237
                         ? modifier : EXPAND_NORMAL);
7238
 
7239
        /* If this is a constant, put it into a register if it is a legitimate
7240
           constant, OFFSET is 0, and we won't try to extract outside the
7241
           register (in case we were passed a partially uninitialized object
7242
           or a view_conversion to a larger size).  Force the constant to
7243
           memory otherwise.  */
7244
        if (CONSTANT_P (op0))
7245
          {
7246
            enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7247
            if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7248
                && offset == 0
7249
                && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7250
              op0 = force_reg (mode, op0);
7251
            else
7252
              op0 = validize_mem (force_const_mem (mode, op0));
7253
          }
7254
 
7255
        /* Otherwise, if this object is not in memory and we either have an
7256
           offset, a BLKmode result, or a reference outside the object, put it
7257
           there.  Such cases can occur in Ada if we have unchecked conversion
7258
           of an expression from a scalar type to an array or record type or
7259
           for an ARRAY_RANGE_REF whose type is BLKmode.  */
7260
        else if (!MEM_P (op0)
7261
                 && (offset != 0
7262
                     || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7263
                     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7264
          {
7265
            tree nt = build_qualified_type (TREE_TYPE (tem),
7266
                                            (TYPE_QUALS (TREE_TYPE (tem))
7267
                                             | TYPE_QUAL_CONST));
7268
            rtx memloc = assign_temp (nt, 1, 1, 1);
7269
 
7270
            emit_move_insn (memloc, op0);
7271
            op0 = memloc;
7272
          }
7273
 
7274
        if (offset != 0)
7275
          {
7276
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7277
                                          EXPAND_SUM);
7278
 
7279
            gcc_assert (MEM_P (op0));
7280
 
7281
#ifdef POINTERS_EXTEND_UNSIGNED
7282
            if (GET_MODE (offset_rtx) != Pmode)
7283
              offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7284
#else
7285
            if (GET_MODE (offset_rtx) != ptr_mode)
7286
              offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7287
#endif
7288
 
7289
            if (GET_MODE (op0) == BLKmode
7290
                /* A constant address in OP0 can have VOIDmode; we must
7291
                   not try to call force_reg in that case.  */
7292
                && GET_MODE (XEXP (op0, 0)) != VOIDmode
7293
                && bitsize != 0
7294
                && (bitpos % bitsize) == 0
7295
                && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7296
                && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7297
              {
7298
                op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7299
                bitpos = 0;
7300
              }
7301
 
7302
            op0 = offset_address (op0, offset_rtx,
7303
                                  highest_pow2_factor (offset));
7304
          }
7305
 
7306
        /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7307
           record its alignment as BIGGEST_ALIGNMENT.  */
7308
        if (MEM_P (op0) && bitpos == 0 && offset != 0
7309
            && is_aligning_offset (offset, tem))
7310
          set_mem_align (op0, BIGGEST_ALIGNMENT);
7311
 
7312
        /* Don't forget about volatility even if this is a bitfield.  */
7313
        if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7314
          {
7315
            if (op0 == orig_op0)
7316
              op0 = copy_rtx (op0);
7317
 
7318
            MEM_VOLATILE_P (op0) = 1;
7319
          }
7320
 
7321
        /* The following code doesn't handle CONCAT.
7322
           Assume only bitpos == 0 can be used for CONCAT, due to
7323
           one-element arrays having the same mode as their element.  */
7324
        if (GET_CODE (op0) == CONCAT)
7325
          {
7326
            gcc_assert (bitpos == 0
7327
                        && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7328
            return op0;
7329
          }
7330
 
7331
        /* In cases where an aligned union has an unaligned object
7332
           as a field, we might be extracting a BLKmode value from
7333
           an integer-mode (e.g., SImode) object.  Handle this case
7334
           by doing the extract into an object as wide as the field
7335
           (which we know to be the width of a basic mode), then
7336
           storing into memory, and changing the mode to BLKmode.  */
7337
        if (mode1 == VOIDmode
7338
            || REG_P (op0) || GET_CODE (op0) == SUBREG
7339
            || (mode1 != BLKmode && ! direct_load[(int) mode1]
7340
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7341
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7342
                && modifier != EXPAND_CONST_ADDRESS
7343
                && modifier != EXPAND_INITIALIZER)
7344
            /* If the field isn't aligned enough to fetch as a memref,
7345
               fetch it as a bit field.  */
7346
            || (mode1 != BLKmode
7347
                && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7348
                      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7349
                      || (MEM_P (op0)
7350
                          && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7351
                              || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7352
                     && ((modifier == EXPAND_CONST_ADDRESS
7353
                          || modifier == EXPAND_INITIALIZER)
7354
                         ? STRICT_ALIGNMENT
7355
                         : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7356
                    || (bitpos % BITS_PER_UNIT != 0)))
7357
            /* If the type and the field are a constant size and the
7358
               size of the type isn't the same size as the bitfield,
7359
               we must use bitfield operations.  */
7360
            || (bitsize >= 0
7361
                && TYPE_SIZE (TREE_TYPE (exp))
7362
                && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7363
                && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7364
                                          bitsize)))
7365
          {
7366
            enum machine_mode ext_mode = mode;
7367
 
7368
            if (ext_mode == BLKmode
7369
                && ! (target != 0 && MEM_P (op0)
7370
                      && MEM_P (target)
7371
                      && bitpos % BITS_PER_UNIT == 0))
7372
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7373
 
7374
            if (ext_mode == BLKmode)
7375
              {
7376
                if (target == 0)
7377
                  target = assign_temp (type, 0, 1, 1);
7378
 
7379
                if (bitsize == 0)
7380
                  return target;
7381
 
7382
                /* In this case, BITPOS must start at a byte boundary and
7383
                   TARGET, if specified, must be a MEM.  */
7384
                gcc_assert (MEM_P (op0)
7385
                            && (!target || MEM_P (target))
7386
                            && !(bitpos % BITS_PER_UNIT));
7387
 
7388
                emit_block_move (target,
7389
                                 adjust_address (op0, VOIDmode,
7390
                                                 bitpos / BITS_PER_UNIT),
7391
                                 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7392
                                          / BITS_PER_UNIT),
7393
                                 (modifier == EXPAND_STACK_PARM
7394
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7395
 
7396
                return target;
7397
              }
7398
 
7399
            op0 = validize_mem (op0);
7400
 
7401
            if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7402
              mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7403
 
7404
            op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7405
                                     (modifier == EXPAND_STACK_PARM
7406
                                      ? NULL_RTX : target),
7407
                                     ext_mode, ext_mode);
7408
 
7409
            /* If the result is a record type and BITSIZE is narrower than
7410
               the mode of OP0, an integral mode, and this is a big endian
7411
               machine, we must put the field into the high-order bits.  */
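            /* E.g., a 24-bit field extracted into a 32-bit register on a
               big-endian target is shifted left by 8 so that its bytes end
               up in the most significant positions.  (Illustrative note.)  */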
7412
            if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7413
                && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7414
                && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7415
              op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7416
                                  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7417
                                            - bitsize),
7418
                                  op0, 1);
7419
 
7420
            /* If the result type is BLKmode, store the data into a temporary
7421
               of the appropriate type, but with the mode corresponding to the
7422
               mode for the data we have (op0's mode).  It's tempting to make
7423
               this a constant type, since we know it's only being stored once,
7424
               but that can cause problems if we are taking the address of this
7425
               COMPONENT_REF because the MEM of any reference via that address
7426
               will have flags corresponding to the type, which will not
7427
               necessarily be constant.  */
7428
            if (mode == BLKmode)
7429
              {
7430
                rtx new
7431
                  = assign_stack_temp_for_type
7432
                    (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7433
 
7434
                emit_move_insn (new, op0);
7435
                op0 = copy_rtx (new);
7436
                PUT_MODE (op0, BLKmode);
7437
                set_mem_attributes (op0, exp, 1);
7438
              }
7439
 
7440
            return op0;
7441
          }
7442
 
7443
        /* If the result is BLKmode, use that to access the object
7444
           now as well.  */
7445
        if (mode == BLKmode)
7446
          mode1 = BLKmode;
7447
 
7448
        /* Get a reference to just this component.  */
7449
        if (modifier == EXPAND_CONST_ADDRESS
7450
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7451
          op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7452
        else
7453
          op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7454
 
7455
        if (op0 == orig_op0)
7456
          op0 = copy_rtx (op0);
7457
 
7458
        set_mem_attributes (op0, exp, 0);
7459
        if (REG_P (XEXP (op0, 0)))
7460
          mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7461
 
7462
        MEM_VOLATILE_P (op0) |= volatilep;
7463
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7464
            || modifier == EXPAND_CONST_ADDRESS
7465
            || modifier == EXPAND_INITIALIZER)
7466
          return op0;
7467
        else if (target == 0)
7468
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7469
 
7470
        convert_move (target, op0, unsignedp);
7471
        return target;
7472
      }
7473
 
7474
    case OBJ_TYPE_REF:
7475
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7476
 
7477
    case CALL_EXPR:
7478
      /* Check for a built-in function.  */
7479
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7480
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7481
              == FUNCTION_DECL)
7482
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7483
        {
7484
          if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7485
              == BUILT_IN_FRONTEND)
7486
            return lang_hooks.expand_expr (exp, original_target,
7487
                                           tmode, modifier,
7488
                                           alt_rtl);
7489
          else
7490
            return expand_builtin (exp, target, subtarget, tmode, ignore);
7491
        }
7492
 
7493
      return expand_call (exp, target, ignore);
7494
 
7495
    case NON_LVALUE_EXPR:
7496
    case NOP_EXPR:
7497
    case CONVERT_EXPR:
7498
      if (TREE_OPERAND (exp, 0) == error_mark_node)
7499
        return const0_rtx;
7500
 
7501
      if (TREE_CODE (type) == UNION_TYPE)
7502
        {
7503
          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
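          /* A conversion to a union type arises, e.g., from the GNU C
             "cast to union" extension; the operand is simply stored into
             the start of the union's storage below.  */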
7504
 
7505
          /* If both input and output are BLKmode, this conversion isn't doing
7506
             anything except possibly changing memory attributes.  */
7507
          if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7508
            {
7509
              rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7510
                                        modifier);
7511
 
7512
              result = copy_rtx (result);
7513
              set_mem_attributes (result, exp, 0);
7514
              return result;
7515
            }
7516
 
7517
          if (target == 0)
7518
            {
7519
              if (TYPE_MODE (type) != BLKmode)
7520
                target = gen_reg_rtx (TYPE_MODE (type));
7521
              else
7522
                target = assign_temp (type, 0, 1, 1);
7523
            }
7524
 
7525
          if (MEM_P (target))
7526
            /* Store data into beginning of memory target.  */
7527
            store_expr (TREE_OPERAND (exp, 0),
7528
                        adjust_address (target, TYPE_MODE (valtype), 0),
7529
                        modifier == EXPAND_STACK_PARM);
7530
 
7531
          else
7532
            {
7533
              gcc_assert (REG_P (target));
7534
 
7535
              /* Store this field into a union of the proper type.  */
7536
              store_field (target,
7537
                           MIN ((int_size_in_bytes (TREE_TYPE
7538
                                                    (TREE_OPERAND (exp, 0)))
7539
                                 * BITS_PER_UNIT),
7540
                                (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7541
                           0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7542
                           type, 0);
7543
            }
7544
 
7545
          /* Return the entire union.  */
7546
          return target;
7547
        }
7548
 
7549
      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7550
        {
7551
          op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7552
                             modifier);
7553
 
7554
          /* If the signedness of the conversion differs and OP0 is
7555
             a promoted SUBREG, clear that indication since we now
7556
             have to do the proper extension.  */
7557
          if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7558
              && GET_CODE (op0) == SUBREG)
7559
            SUBREG_PROMOTED_VAR_P (op0) = 0;
7560
 
7561
          return REDUCE_BIT_FIELD (op0);
7562
        }
7563
 
7564
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7565
      if (GET_MODE (op0) == mode)
7566
        ;
7567
 
7568
      /* If OP0 is a constant, just convert it into the proper mode.  */
7569
      else if (CONSTANT_P (op0))
7570
        {
7571
          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7572
          enum machine_mode inner_mode = TYPE_MODE (inner_type);
7573
 
7574
          if (modifier == EXPAND_INITIALIZER)
7575
            op0 = simplify_gen_subreg (mode, op0, inner_mode,
7576
                                       subreg_lowpart_offset (mode,
7577
                                                              inner_mode));
7578
          else
7579
            op0 = convert_modes (mode, inner_mode, op0,
7580
                                 TYPE_UNSIGNED (inner_type));
7581
        }
7582
 
7583
      else if (modifier == EXPAND_INITIALIZER)
7584
        op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7585
 
7586
      else if (target == 0)
7587
        op0 = convert_to_mode (mode, op0,
7588
                               TYPE_UNSIGNED (TREE_TYPE
7589
                                              (TREE_OPERAND (exp, 0))));
7590
      else
7591
        {
7592
          convert_move (target, op0,
7593
                        TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7594
          op0 = target;
7595
        }
7596
 
7597
      return REDUCE_BIT_FIELD (op0);
7598
 
7599
    case VIEW_CONVERT_EXPR:
7600
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7601
 
7602
      /* If the input and output modes are both the same, we are done.  */
7603
      if (TYPE_MODE (type) == GET_MODE (op0))
7604
        ;
7605
      /* If neither mode is BLKmode, and both modes are the same size
7606
         then we can use gen_lowpart.  */
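      /* E.g., viewing a 32-bit float as a 32-bit integer only reinterprets
         the bits, so taking the low part of the same rtx is enough.  */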
7607
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7608
               && GET_MODE_SIZE (TYPE_MODE (type))
7609
                   == GET_MODE_SIZE (GET_MODE (op0)))
7610
        {
7611
          if (GET_CODE (op0) == SUBREG)
7612
            op0 = force_reg (GET_MODE (op0), op0);
7613
          op0 = gen_lowpart (TYPE_MODE (type), op0);
7614
        }
7615
      /* If both modes are integral, then we can convert from one to the
7616
         other.  */
7617
      else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7618
               && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7619
        op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7620
                             TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7621
      /* As a last resort, spill op0 to memory, and reload it in a
7622
         different mode.  */
7623
      else if (!MEM_P (op0))
7624
        {
7625
          /* If the operand is not a MEM, force it into memory.  Since we
7626
             are going to be changing the mode of the MEM, don't call
7627
             force_const_mem for constants because we don't allow pool
7628
             constants to change mode.  */
7629
          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7630
 
7631
          gcc_assert (!TREE_ADDRESSABLE (exp));
7632
 
7633
          if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7634
            target
7635
              = assign_stack_temp_for_type
7636
                (TYPE_MODE (inner_type),
7637
                 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7638
 
7639
          emit_move_insn (target, op0);
7640
          op0 = target;
7641
        }
7642
 
7643
      /* At this point, OP0 is in the correct mode.  If the output type is such
7644
         that the operand is known to be aligned, indicate that it is.
7645
         Otherwise, we need only be concerned about alignment for non-BLKmode
7646
         results.  */
7647
      if (MEM_P (op0))
7648
        {
7649
          op0 = copy_rtx (op0);
7650
 
7651
          if (TYPE_ALIGN_OK (type))
7652
            set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7653
          else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7654
                   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7655
            {
7656
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7657
              HOST_WIDE_INT temp_size
7658
                = MAX (int_size_in_bytes (inner_type),
7659
                       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7660
              rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7661
                                                    temp_size, 0, type);
7662
              rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7663
 
7664
              gcc_assert (!TREE_ADDRESSABLE (exp));
7665
 
7666
              if (GET_MODE (op0) == BLKmode)
7667
                emit_block_move (new_with_op0_mode, op0,
7668
                                 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7669
                                 (modifier == EXPAND_STACK_PARM
7670
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7671
              else
7672
                emit_move_insn (new_with_op0_mode, op0);
7673
 
7674
              op0 = new;
7675
            }
7676
 
7677
          op0 = adjust_address (op0, TYPE_MODE (type), 0);
7678
        }
7679
 
7680
      return op0;
7681
 
7682
    case PLUS_EXPR:
7683
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7684
         something else, make sure we add the register to the constant and
7685
         then to the other thing.  This case can occur during strength
7686
         reduction and doing it this way will produce better code if the
7687
         frame pointer or argument pointer is eliminated.
7688
 
7689
         fold-const.c will ensure that the constant is always in the inner
7690
         PLUS_EXPR, so the only case we need to do anything about is if
7691
         sp, ap, or fp is our second argument, in which case we must swap
7692
         the innermost first argument and our second argument.  */
7693
 
7694
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7695
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7696
          && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7697
          && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7698
              || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7699
              || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7700
        {
7701
          tree t = TREE_OPERAND (exp, 1);
7702
 
7703
          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7704
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7705
        }
7706
 
7707
      /* If the result is to be ptr_mode and we are adding an integer to
7708
         something, we might be forming a constant.  So try to use
7709
         plus_constant.  If it produces a sum and we can't accept it,
7710
         use force_operand.  This allows P = &ARR[const] to generate
7711
         efficient code on machines where a SYMBOL_REF is not a valid
7712
         address.
7713
 
7714
         If this is an EXPAND_SUM call, always return the sum.  */
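      /* For instance, with 4-byte ints, P = &ARR[4] can fold to the single
         constant address (const (plus (symbol_ref ARR) (const_int 16)))
         instead of a run-time addition.  (Illustrative sketch.)  */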
7715
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7716
          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7717
        {
7718
          if (modifier == EXPAND_STACK_PARM)
7719
            target = 0;
7720
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7721
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7722
              && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7723
            {
7724
              rtx constant_part;
7725
 
7726
              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7727
                                 EXPAND_SUM);
7728
              /* Use immed_double_const to ensure that the constant is
7729
                 truncated according to the mode of OP1, then sign extended
7730
                 to a HOST_WIDE_INT.  Using the constant directly can result
7731
                 in non-canonical RTL in a 64x32 cross compile.  */
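              /* E.g., on a 64-bit host targeting 32-bit, an SImode constant
                 with bit 31 set must become the sign-extended CONST_INT
                 rather than the zero-extended 64-bit value, or the RTL
                 would not be canonical.  */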
7732
              constant_part
7733
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7734
                                      (HOST_WIDE_INT) 0,
7735
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7736
              op1 = plus_constant (op1, INTVAL (constant_part));
7737
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7738
                op1 = force_operand (op1, target);
7739
              return REDUCE_BIT_FIELD (op1);
7740
            }
7741
 
7742
          else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7743
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7744
                   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7745
            {
7746
              rtx constant_part;
7747
 
7748
              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7749
                                 (modifier == EXPAND_INITIALIZER
7750
                                 ? EXPAND_INITIALIZER : EXPAND_SUM));
7751
              if (! CONSTANT_P (op0))
7752
                {
7753
                  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7754
                                     VOIDmode, modifier);
7755
                  /* Return a PLUS if modifier says it's OK.  */
7756
                  if (modifier == EXPAND_SUM
7757
                      || modifier == EXPAND_INITIALIZER)
7758
                    return simplify_gen_binary (PLUS, mode, op0, op1);
7759
                  goto binop2;
7760
                }
7761
              /* Use immed_double_const to ensure that the constant is
7762
                 truncated according to the mode of OP1, then sign extended
7763
                 to a HOST_WIDE_INT.  Using the constant directly can result
7764
                 in non-canonical RTL in a 64x32 cross compile.  */
7765
              constant_part
7766
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7767
                                      (HOST_WIDE_INT) 0,
7768
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7769
              op0 = plus_constant (op0, INTVAL (constant_part));
7770
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7771
                op0 = force_operand (op0, target);
7772
              return REDUCE_BIT_FIELD (op0);
7773
            }
7774
        }
7775
 
7776
      /* No sense saving up arithmetic to be done
7777
         if it's all in the wrong mode to form part of an address.
7778
         And force_operand won't know whether to sign-extend or
7779
         zero-extend.  */
7780
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7781
          || mode != ptr_mode)
7782
        {
7783
          expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7784
                           subtarget, &op0, &op1, 0);
7785
          if (op0 == const0_rtx)
7786
            return op1;
7787
          if (op1 == const0_rtx)
7788
            return op0;
7789
          goto binop2;
7790
        }
7791
 
7792
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7793
                       subtarget, &op0, &op1, modifier);
7794
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7795
 
7796
    case MINUS_EXPR:
7797
      /* For initializers, we are allowed to return a MINUS of two
7798
         symbolic constants.  Here we handle all cases when both operands
7799
         are constant.  */
7800
      /* Handle difference of two symbolic constants,
7801
         for the sake of an initializer.  */
7802
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7803
          && really_constant_p (TREE_OPERAND (exp, 0))
7804
          && really_constant_p (TREE_OPERAND (exp, 1)))
7805
        {
7806
          expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7807
                           NULL_RTX, &op0, &op1, modifier);
7808
 
7809
          /* If the last operand is a CONST_INT, use plus_constant of
7810
             the negated constant.  Else make the MINUS.  */
7811
          if (GET_CODE (op1) == CONST_INT)
7812
            return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7813
          else
7814
            return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7815
        }
7816
 
7817
      /* No sense saving up arithmetic to be done
7818
         if it's all in the wrong mode to form part of an address.
7819
         And force_operand won't know whether to sign-extend or
7820
         zero-extend.  */
7821
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7822
          || mode != ptr_mode)
7823
        goto binop;
7824
 
7825
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7826
                       subtarget, &op0, &op1, modifier);
7827
 
7828
      /* Convert A - const to A + (-const).  */
7829
      if (GET_CODE (op1) == CONST_INT)
7830
        {
7831
          op1 = negate_rtx (mode, op1);
7832
          return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7833
        }
7834
 
7835
      goto binop2;
7836
 
7837
    case MULT_EXPR:
7838
      /* If first operand is constant, swap them.
7839
         Thus the following special case checks need only
7840
         check the second operand.  */
7841
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7842
        {
7843
          tree t1 = TREE_OPERAND (exp, 0);
7844
          TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7845
          TREE_OPERAND (exp, 1) = t1;
7846
        }
7847
 
7848
      /* Attempt to return something suitable for generating an
7849
         indexed address, for machines that support that.  */
7850
 
7851
      if (modifier == EXPAND_SUM && mode == ptr_mode
7852
          && host_integerp (TREE_OPERAND (exp, 1), 0))
7853
        {
7854
          tree exp1 = TREE_OPERAND (exp, 1);
7855
 
7856
          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7857
                             EXPAND_SUM);
7858
 
7859
          if (!REG_P (op0))
7860
            op0 = force_operand (op0, NULL_RTX);
7861
          if (!REG_P (op0))
7862
            op0 = copy_to_mode_reg (mode, op0);
7863
 
7864
          return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7865
                               gen_int_mode (tree_low_cst (exp1, 0),
7866
                                             TYPE_MODE (TREE_TYPE (exp1)))));
7867
        }
7868
 
7869
      if (modifier == EXPAND_STACK_PARM)
7870
        target = 0;
7871
 
7872
      /* Check for multiplying things that have been extended
7873
         from a narrower type.  If this machine supports multiplying
7874
         in that narrower type with a result in the desired type,
7875
         do it that way, and avoid the explicit type-conversion.  */
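      /* E.g., for "long long p = (long long) a * (long long) b" with int
         operands, a 32x32->64 widening multiply (when the target has one)
         avoids first extending both operands to 64 bits.  (Sketch, assuming
         32-bit int and 64-bit long long.)  */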
7876
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7877
          && TREE_CODE (type) == INTEGER_TYPE
7878
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7879
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7880
          && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7881
               && int_fits_type_p (TREE_OPERAND (exp, 1),
7882
                                   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7883
               /* Don't use a widening multiply if a shift will do.  */
7884
               && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7885
                    > HOST_BITS_PER_WIDE_INT)
7886
                   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7887
              ||
7888
              (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7889
               && (TYPE_PRECISION (TREE_TYPE
7890
                                   (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7891
                   == TYPE_PRECISION (TREE_TYPE
7892
                                      (TREE_OPERAND
7893
                                       (TREE_OPERAND (exp, 0), 0))))
7894
               /* If both operands are extended, they must either both
7895
                  be zero-extended or both be sign-extended.  */
7896
               && (TYPE_UNSIGNED (TREE_TYPE
7897
                                  (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7898
                   == TYPE_UNSIGNED (TREE_TYPE
7899
                                     (TREE_OPERAND
7900
                                      (TREE_OPERAND (exp, 0), 0)))))))
7901
        {
7902
          tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7903
          enum machine_mode innermode = TYPE_MODE (op0type);
7904
          bool zextend_p = TYPE_UNSIGNED (op0type);
7905
          optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7906
          this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7907
 
7908
          if (mode == GET_MODE_2XWIDER_MODE (innermode))
7909
            {
7910
              if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7911
                {
7912
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7913
                    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7914
                                     TREE_OPERAND (exp, 1),
7915
                                     NULL_RTX, &op0, &op1, 0);
7916
                  else
7917
                    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7918
                                     TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7919
                                     NULL_RTX, &op0, &op1, 0);
7920
                  goto binop3;
7921
                }
7922
              else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7923
                       && innermode == word_mode)
7924
                {
7925
                  rtx htem, hipart;
7926
                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7927
                                     NULL_RTX, VOIDmode, 0);
7928
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7929
                    op1 = convert_modes (innermode, mode,
7930
                                         expand_expr (TREE_OPERAND (exp, 1),
7931
                                                      NULL_RTX, VOIDmode, 0),
7932
                                         unsignedp);
7933
                  else
7934
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7935
                                       NULL_RTX, VOIDmode, 0);
7936
                  temp = expand_binop (mode, other_optab, op0, op1, target,
7937
                                       unsignedp, OPTAB_LIB_WIDEN);
7938
                  hipart = gen_highpart (innermode, temp);
7939
                  htem = expand_mult_highpart_adjust (innermode, hipart,
7940
                                                      op0, op1, hipart,
7941
                                                      zextend_p);
7942
                  if (htem != hipart)
7943
                    emit_move_insn (hipart, htem);
7944
                  return REDUCE_BIT_FIELD (temp);
7945
                }
7946
            }
7947
        }
7948
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7949
                       subtarget, &op0, &op1, 0);
7950
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7951
 
7952
    case TRUNC_DIV_EXPR:
7953
    case FLOOR_DIV_EXPR:
7954
    case CEIL_DIV_EXPR:
7955
    case ROUND_DIV_EXPR:
7956
    case EXACT_DIV_EXPR:
7957
      if (modifier == EXPAND_STACK_PARM)
7958
        target = 0;
7959
      /* Possible optimization: compute the dividend with EXPAND_SUM
7960
         then, if the divisor is constant, we can optimize the case
7961
         where some terms of the dividend have coefficients divisible by it.  */
7962
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7963
                       subtarget, &op0, &op1, 0);
7964
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7965
 
7966
    case RDIV_EXPR:
7967
      goto binop;
7968
 
7969
    case TRUNC_MOD_EXPR:
7970
    case FLOOR_MOD_EXPR:
7971
    case CEIL_MOD_EXPR:
7972
    case ROUND_MOD_EXPR:
7973
      if (modifier == EXPAND_STACK_PARM)
7974
        target = 0;
7975
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7976
                       subtarget, &op0, &op1, 0);
7977
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7978
 
7979
    case FIX_ROUND_EXPR:
7980
    case FIX_FLOOR_EXPR:
7981
    case FIX_CEIL_EXPR:
7982
      gcc_unreachable ();                       /* Not used for C.  */
7983
 
7984
    case FIX_TRUNC_EXPR:
7985
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7986
      if (target == 0 || modifier == EXPAND_STACK_PARM)
7987
        target = gen_reg_rtx (mode);
7988
      expand_fix (target, op0, unsignedp);
7989
      return target;
7990
 
7991
    case FLOAT_EXPR:
7992
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7993
      if (target == 0 || modifier == EXPAND_STACK_PARM)
7994
        target = gen_reg_rtx (mode);
7995
      /* expand_float can't figure out what to do if FROM has VOIDmode.
7996
         So give it the correct mode.  With -O, cse will optimize this.  */
7997
      if (GET_MODE (op0) == VOIDmode)
7998
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7999
                                op0);
8000
      expand_float (target, op0,
8001
                    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8002
      return target;
8003
 
8004
    case NEGATE_EXPR:
8005
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8006
      if (modifier == EXPAND_STACK_PARM)
8007
        target = 0;
8008
      temp = expand_unop (mode,
8009
                          optab_for_tree_code (NEGATE_EXPR, type),
8010
                          op0, target, 0);
8011
      gcc_assert (temp);
8012
      return REDUCE_BIT_FIELD (temp);
8013
 
8014
    case ABS_EXPR:
8015
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8016
      if (modifier == EXPAND_STACK_PARM)
8017
        target = 0;
8018
 
8019
      /* ABS_EXPR is not valid for complex arguments.  */
8020
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8021
                  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8022
 
8023
      /* Unsigned abs is simply the operand.  Testing here means we don't
8024
         risk generating incorrect code below.  */
8025
      if (TYPE_UNSIGNED (type))
8026
        return op0;
8027
 
8028
      return expand_abs (mode, op0, target, unsignedp,
8029
                         safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8030
 
8031
    case MAX_EXPR:
8032
    case MIN_EXPR:
8033
      target = original_target;
8034
      if (target == 0
8035
          || modifier == EXPAND_STACK_PARM
8036
          || (MEM_P (target) && MEM_VOLATILE_P (target))
8037
          || GET_MODE (target) != mode
8038
          || (REG_P (target)
8039
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
8040
        target = gen_reg_rtx (mode);
8041
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8042
                       target, &op0, &op1, 0);
8043
 
8044
      /* First try to do it with a special MIN or MAX instruction.
8045
         If that does not win, use a conditional jump to select the proper
8046
         value.  */
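      /* The branching fallback emits, roughly, for MAX:
             target = op0;
             if (target >= op1) goto done;
             target = op1;
           done:
         with GEU/LEU instead of GE/LE for unsigned operands.  */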
8047
      this_optab = optab_for_tree_code (code, type);
8048
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8049
                           OPTAB_WIDEN);
8050
      if (temp != 0)
8051
        return temp;
8052
 
8053
      /* At this point, a MEM target is no longer useful; we will get better
8054
         code without it.  */
8055
 
8056
      if (! REG_P (target))
8057
        target = gen_reg_rtx (mode);
8058
 
8059
      /* If op1 was placed in target, swap op0 and op1.  */
8060
      if (target != op0 && target == op1)
8061
        {
8062
          temp = op0;
8063
          op0 = op1;
8064
          op1 = temp;
8065
        }
8066
 
8067
      /* We generate better code and avoid problems with op1 mentioning
8068
         target by forcing op1 into a pseudo if it isn't a constant.  */
8069
      if (! CONSTANT_P (op1))
8070
        op1 = force_reg (mode, op1);
8071
 
8072
      {
8073
        enum rtx_code comparison_code;
8074
        rtx cmpop1 = op1;
8075
 
8076
        if (code == MAX_EXPR)
8077
          comparison_code = unsignedp ? GEU : GE;
8078
        else
8079
          comparison_code = unsignedp ? LEU : LE;
8080
 
8081
        /* Canonicalize to comparisons against 0.  */
8082
        if (op1 == const1_rtx)
8083
          {
8084
            /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8085
               or (a != 0 ? a : 1) for unsigned.
8086
               For MIN we are safe converting (a <= 1 ? a : 1)
8087
               into (a <= 0 ? a : 1)  */
8088
            cmpop1 = const0_rtx;
8089
            if (code == MAX_EXPR)
8090
              comparison_code = unsignedp ? NE : GT;
8091
          }
8092
        if (op1 == constm1_rtx && !unsignedp)
8093
          {
8094
            /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8095
               and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8096
            cmpop1 = const0_rtx;
8097
            if (code == MIN_EXPR)
8098
              comparison_code = LT;
8099
          }
8100
#ifdef HAVE_conditional_move
8101
        /* Use a conditional move if possible.  */
8102
        if (can_conditionally_move_p (mode))
8103
          {
8104
            rtx insn;
8105
 
8106
            /* ??? Same problem as in expmed.c: emit_conditional_move
8107
               forces a stack adjustment via compare_from_rtx, and we
8108
               lose the stack adjustment if the sequence we are about
8109
               to create is discarded.  */
8110
            do_pending_stack_adjust ();
8111
 
8112
            start_sequence ();
8113
 
8114
            /* Try to emit the conditional move.  */
8115
            insn = emit_conditional_move (target, comparison_code,
8116
                                          op0, cmpop1, mode,
8117
                                          op0, op1, mode,
8118
                                          unsignedp);
8119
 
8120
            /* If we could do the conditional move, emit the sequence,
8121
               and return.  */
8122
            if (insn)
8123
              {
8124
                rtx seq = get_insns ();
8125
                end_sequence ();
8126
                emit_insn (seq);
8127
                return target;
8128
              }
8129
 
8130
            /* Otherwise discard the sequence and fall back to code with
8131
               branches.  */
8132
            end_sequence ();
8133
          }
8134
#endif
8135
        if (target != op0)
8136
          emit_move_insn (target, op0);
8137
 
8138
        temp = gen_label_rtx ();
8139
 
8140
        /* If this mode is an integer too wide to compare properly,
8141
           compare word by word.  Rely on cse to optimize constant cases.  */
8142
        if (GET_MODE_CLASS (mode) == MODE_INT
8143
            && ! can_compare_p (GE, mode, ccp_jump))
8144
          {
8145
            if (code == MAX_EXPR)
8146
              do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8147
                                            NULL_RTX, temp);
8148
            else
8149
              do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8150
                                            NULL_RTX, temp);
8151
          }
8152
        else
8153
          {
8154
            do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8155
                                     unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8156
          }
8157
      }
8158
      emit_move_insn (target, op1);
8159
      emit_label (temp);
8160
      return target;
8161
 
8162
    case BIT_NOT_EXPR:
8163
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8164
      if (modifier == EXPAND_STACK_PARM)
8165
        target = 0;
8166
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8167
      gcc_assert (temp);
8168
      return temp;
8169
 
8170
      /* ??? Can optimize bitwise operations with one arg constant.
8171
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8172
         and (a bitwise1 b) bitwise2 b (etc)
8173
         but that is probably not worthwhile.  */
8174
 
8175
      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
8176
         boolean values when we want in all cases to compute both of them.  In
8177
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
8178
         as actual zero-or-1 values and then bitwise anding.  In cases where
8179
         there cannot be any side effects, better code would be made by
8180
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8181
         how to recognize those cases.  */
8182
 
8183
    case TRUTH_AND_EXPR:
8184
      code = BIT_AND_EXPR;
8185
    case BIT_AND_EXPR:
8186
      goto binop;
8187
 
8188
    case TRUTH_OR_EXPR:
8189
      code = BIT_IOR_EXPR;
8190
    case BIT_IOR_EXPR:
8191
      goto binop;
8192
 
8193
    case TRUTH_XOR_EXPR:
8194
      code = BIT_XOR_EXPR;
8195
    case BIT_XOR_EXPR:
8196
      goto binop;
8197
 
8198
    case LSHIFT_EXPR:
8199
    case RSHIFT_EXPR:
8200
    case LROTATE_EXPR:
8201
    case RROTATE_EXPR:
8202
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8203
        subtarget = 0;
8204
      if (modifier == EXPAND_STACK_PARM)
8205
        target = 0;
8206
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8207
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8208
                           unsignedp);
8209
 
8210
      /* Could determine the answer when only additive constants differ.  Also,
8211
         the addition of one can be handled by changing the condition.  */
8212
    case LT_EXPR:
8213
    case LE_EXPR:
8214
    case GT_EXPR:
8215
    case GE_EXPR:
8216
    case EQ_EXPR:
8217
    case NE_EXPR:
8218
    case UNORDERED_EXPR:
8219
    case ORDERED_EXPR:
8220
    case UNLT_EXPR:
8221
    case UNLE_EXPR:
8222
    case UNGT_EXPR:
8223
    case UNGE_EXPR:
8224
    case UNEQ_EXPR:
8225
    case LTGT_EXPR:
8226
      temp = do_store_flag (exp,
8227
                            modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8228
                            tmode != VOIDmode ? tmode : mode, 0);
8229
      if (temp != 0)
8230
        return temp;
8231
 
8232
      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
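      /* Roughly:  temp = foo;  if (temp == 0) goto done;  temp = 1;  done:
         with the constant case handled directly below.  */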
8233
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8234
          && original_target
8235
          && REG_P (original_target)
8236
          && (GET_MODE (original_target)
8237
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8238
        {
8239
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8240
                              VOIDmode, 0);
8241
 
8242
          /* If temp is constant, we can just compute the result.  */
8243
          if (GET_CODE (temp) == CONST_INT)
8244
            {
8245
              if (INTVAL (temp) != 0)
8246
                emit_move_insn (target, const1_rtx);
8247
              else
8248
                emit_move_insn (target, const0_rtx);
8249
 
8250
              return target;
8251
            }
8252
 
8253
          if (temp != original_target)
8254
            {
8255
              enum machine_mode mode1 = GET_MODE (temp);
8256
              if (mode1 == VOIDmode)
8257
                mode1 = tmode != VOIDmode ? tmode : mode;
8258
 
8259
              temp = copy_to_mode_reg (mode1, temp);
8260
            }
8261
 
8262
          op1 = gen_label_rtx ();
8263
          emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8264
                                   GET_MODE (temp), unsignedp, op1);
8265
          emit_move_insn (temp, const1_rtx);
8266
          emit_label (op1);
8267
          return temp;
8268
        }
8269
 
8270
      /* If no set-flag instruction, must generate a conditional store
8271
         into a temporary variable.  Drop through and handle this
8272
         like && and ||.  */
8273
 
8274
      if (! ignore
8275
          && (target == 0
8276
              || modifier == EXPAND_STACK_PARM
8277
              || ! safe_from_p (target, exp, 1)
8278
              /* Make sure we don't have a hard reg (such as function's return
8279
                 value) live across basic blocks, if not optimizing.  */
8280
              || (!optimize && REG_P (target)
8281
                  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8282
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8283
 
8284
      if (target)
8285
        emit_move_insn (target, const0_rtx);
8286
 
8287
      op1 = gen_label_rtx ();
8288
      jumpifnot (exp, op1);
8289
 
8290
      if (target)
8291
        emit_move_insn (target, const1_rtx);
8292
 
8293
      emit_label (op1);
8294
      return ignore ? const0_rtx : target;
8295
 
8296
    case TRUTH_NOT_EXPR:
8297
      if (modifier == EXPAND_STACK_PARM)
8298
        target = 0;
8299
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8300
      /* The parser is careful to generate TRUTH_NOT_EXPR
8301
         only with operands that are always zero or one.  */
8302
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8303
                           target, 1, OPTAB_LIB_WIDEN);
8304
      gcc_assert (temp);
8305
      return temp;
8306
 
8307
    case STATEMENT_LIST:
8308
      {
8309
        tree_stmt_iterator iter;
8310
 
8311
        gcc_assert (ignore);
8312
 
8313
        for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8314
          expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8315
      }
8316
      return const0_rtx;
8317
 
8318
    case COND_EXPR:
8319
      /* A COND_EXPR whose type is VOID_TYPE represents a
8320
         conditional jump and is handled in
8321
         expand_gimple_cond_expr.  */
8322
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8323
 
8324
        /* Note that COND_EXPRs whose type is a structure or union
8325
         are required to be constructed to contain assignments of
8326
         a temporary variable, so that we can evaluate them here
8327
         for side effect only.  If type is void, we must do likewise.  */
8328
 
8329
        gcc_assert (!TREE_ADDRESSABLE (type)
8330
                    && !ignore
8331
                    && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8332
                    && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8333
 
8334
       /* If we are not to produce a result, we have no target.  Otherwise,
8335
         if a target was specified use it; it will not be used as an
8336
         intermediate target unless it is safe.  If no target, use a
8337
         temporary.  */
8338
 
8339
       if (modifier != EXPAND_STACK_PARM
8340
          && original_target
8341
          && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8342
          && GET_MODE (original_target) == mode
8343
#ifdef HAVE_conditional_move
8344
          && (! can_conditionally_move_p (mode)
8345
              || REG_P (original_target))
8346
#endif
8347
          && !MEM_P (original_target))
8348
        temp = original_target;
8349
       else
8350
        temp = assign_temp (type, 0, 0, 1);
8351
 
8352
       do_pending_stack_adjust ();
8353
       NO_DEFER_POP;
8354
       op0 = gen_label_rtx ();
8355
       op1 = gen_label_rtx ();
8356
       jumpifnot (TREE_OPERAND (exp, 0), op0);
8357
       store_expr (TREE_OPERAND (exp, 1), temp,
8358
                  modifier == EXPAND_STACK_PARM);
8359
 
8360
       emit_jump_insn (gen_jump (op1));
8361
       emit_barrier ();
8362
       emit_label (op0);
8363
       store_expr (TREE_OPERAND (exp, 2), temp,
8364
                  modifier == EXPAND_STACK_PARM);
8365
 
8366
       emit_label (op1);
8367
       OK_DEFER_POP;
8368
       return temp;
8369
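      /* Editor's note (illustrative, not part of expr.c): for a
         value-producing COND_EXPR the sequence above emits roughly

             if (!cond) goto L_else;
             temp = <then-value>;
             goto L_done;
           L_else:
             temp = <else-value>;
           L_done:

         i.e. `temp = cond ? a : b;` spelled out with explicit labels.  */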
 
8370
    case VEC_COND_EXPR:
8371
        target = expand_vec_cond_expr (exp, target);
8372
        return target;
8373
 
8374
    case MODIFY_EXPR:
8375
      {
8376
        tree lhs = TREE_OPERAND (exp, 0);
8377
        tree rhs = TREE_OPERAND (exp, 1);
8378
 
8379
        gcc_assert (ignore);
8380
 
8381
        /* Check for |= or &= of a bitfield of size one into another bitfield
8382
           of size 1.  In this case, (unless we need the result of the
8383
           assignment) we can do this more efficiently with a
8384
           test followed by an assignment, if necessary.
8385
 
8386
           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
8387
           things change so we do, this code should be enhanced to
8388
           support it.  */
8389
        if (TREE_CODE (lhs) == COMPONENT_REF
8390
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
8391
                || TREE_CODE (rhs) == BIT_AND_EXPR)
8392
            && TREE_OPERAND (rhs, 0) == lhs
8393
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8394
            && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8395
            && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8396
          {
8397
            rtx label = gen_label_rtx ();
8398
 
8399
            do_jump (TREE_OPERAND (rhs, 1),
8400
                     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8401
                     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8402
            expand_assignment (lhs, convert (TREE_TYPE (rhs),
8403
                                             (TREE_CODE (rhs) == BIT_IOR_EXPR
8404
                                              ? integer_one_node
8405
                                              : integer_zero_node)));
8406
            do_pending_stack_adjust ();
8407
            emit_label (label);
8408
            return const0_rtx;
8409
          }
8410
 
8411
        expand_assignment (lhs, rhs);
8412
 
8413
        return const0_rtx;
8414
      }
8415
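      /* Editor's note (illustrative, not part of expr.c): for one-bit
         bitfields A and B, the special case above turns

             s.a |= s.b;   into   if (s.b) s.a = 1;
             s.a &= s.b;   into   if (!s.b) s.a = 0;

         so a conditional jump and at most one store are emitted instead
         of a read-modify-write of the destination bitfield.  */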
 
8416
    case RETURN_EXPR:
8417
      if (!TREE_OPERAND (exp, 0))
8418
        expand_null_return ();
8419
      else
8420
        expand_return (TREE_OPERAND (exp, 0));
8421
      return const0_rtx;
8422
 
8423
    case ADDR_EXPR:
8424
      return expand_expr_addr_expr (exp, target, tmode, modifier);
8425
 
8426
    case COMPLEX_EXPR:
8427
      /* Get the rtx code of the operands.  */
8428
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8429
      op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8430
 
8431
      if (!target)
8432
        target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8433
 
8434
      /* Move the real (op0) and imaginary (op1) parts to their location.  */
8435
      write_complex_part (target, op0, false);
8436
      write_complex_part (target, op1, true);
8437
 
8438
      return target;
8439
 
8440
    case REALPART_EXPR:
8441
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8442
      return read_complex_part (op0, false);
8443
 
8444
    case IMAGPART_EXPR:
8445
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8446
      return read_complex_part (op0, true);
8447
 
8448
    case RESX_EXPR:
8449
      expand_resx_expr (exp);
8450
      return const0_rtx;
8451
 
8452
    case TRY_CATCH_EXPR:
8453
    case CATCH_EXPR:
8454
    case EH_FILTER_EXPR:
8455
    case TRY_FINALLY_EXPR:
8456
      /* Lowered by tree-eh.c.  */
8457
      gcc_unreachable ();
8458
 
8459
    case WITH_CLEANUP_EXPR:
8460
    case CLEANUP_POINT_EXPR:
8461
    case TARGET_EXPR:
8462
    case CASE_LABEL_EXPR:
8463
    case VA_ARG_EXPR:
8464
    case BIND_EXPR:
8465
    case INIT_EXPR:
8466
    case CONJ_EXPR:
8467
    case COMPOUND_EXPR:
8468
    case PREINCREMENT_EXPR:
8469
    case PREDECREMENT_EXPR:
8470
    case POSTINCREMENT_EXPR:
8471
    case POSTDECREMENT_EXPR:
8472
    case LOOP_EXPR:
8473
    case EXIT_EXPR:
8474
    case TRUTH_ANDIF_EXPR:
8475
    case TRUTH_ORIF_EXPR:
8476
      /* Lowered by gimplify.c.  */
8477
      gcc_unreachable ();
8478
 
8479
    case EXC_PTR_EXPR:
8480
      return get_exception_pointer (cfun);
8481
 
8482
    case FILTER_EXPR:
8483
      return get_exception_filter (cfun);
8484
 
8485
    case FDESC_EXPR:
8486
      /* Function descriptors are not valid except as
8487
         initialization constants, and should not be expanded.  */
8488
      gcc_unreachable ();
8489
 
8490
    case SWITCH_EXPR:
8491
      expand_case (exp);
8492
      return const0_rtx;
8493
 
8494
    case LABEL_EXPR:
8495
      expand_label (TREE_OPERAND (exp, 0));
8496
      return const0_rtx;
8497
 
8498
    case ASM_EXPR:
8499
      expand_asm_expr (exp);
8500
      return const0_rtx;
8501
 
8502
    case WITH_SIZE_EXPR:
8503
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
8504
         have pulled out the size to use in whatever context it needed.  */
8505
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8506
                               modifier, alt_rtl);
8507
 
8508
    case REALIGN_LOAD_EXPR:
8509
      {
8510
        tree oprnd0 = TREE_OPERAND (exp, 0);
8511
        tree oprnd1 = TREE_OPERAND (exp, 1);
8512
        tree oprnd2 = TREE_OPERAND (exp, 2);
8513
        rtx op2;
8514
 
8515
        this_optab = optab_for_tree_code (code, type);
8516
        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8517
        op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8518
        temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8519
                                  target, unsignedp);
8520
        gcc_assert (temp);
8521
        return temp;
8522
      }
8523
 
8524
    case REDUC_MAX_EXPR:
8525
    case REDUC_MIN_EXPR:
8526
    case REDUC_PLUS_EXPR:
8527
      {
8528
        op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8529
        this_optab = optab_for_tree_code (code, type);
8530
        temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8531
        gcc_assert (temp);
8532
        return temp;
8533
      }
8534
 
8535
    case VEC_LSHIFT_EXPR:
8536
    case VEC_RSHIFT_EXPR:
8537
      {
8538
        target = expand_vec_shift_expr (exp, target);
8539
        return target;
8540
      }
8541
 
8542
    default:
8543
      return lang_hooks.expand_expr (exp, original_target, tmode,
8544
                                     modifier, alt_rtl);
8545
    }
8546
 
8547
  /* Here to do an ordinary binary operator.  */
8548
 binop:
8549
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8550
                   subtarget, &op0, &op1, 0);
8551
 binop2:
8552
  this_optab = optab_for_tree_code (code, type);
8553
 binop3:
8554
  if (modifier == EXPAND_STACK_PARM)
8555
    target = 0;
8556
  temp = expand_binop (mode, this_optab, op0, op1, target,
8557
                       unsignedp, OPTAB_LIB_WIDEN);
8558
  gcc_assert (temp);
8559
  return REDUCE_BIT_FIELD (temp);
8560
}
8561
#undef REDUCE_BIT_FIELD
8562
 
8563
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
8564
   signedness of TYPE), possibly returning the result in TARGET.  */
8565
static rtx
8566
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8567
{
8568
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
8569
  if (target && GET_MODE (target) != GET_MODE (exp))
8570
    target = 0;
8571
  if (TYPE_UNSIGNED (type))
8572
    {
8573
      rtx mask;
8574
      if (prec < HOST_BITS_PER_WIDE_INT)
8575
        mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8576
                                   GET_MODE (exp));
8577
      else
8578
        mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8579
                                   ((unsigned HOST_WIDE_INT) 1
8580
                                    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8581
                                   GET_MODE (exp));
8582
      return expand_and (GET_MODE (exp), exp, mask, target);
8583
    }
8584
  else
8585
    {
8586
      tree count = build_int_cst (NULL_TREE,
8587
                                  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8588
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8589
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8590
    }
8591
}
8592
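/* Editor's illustrative sketch (not part of expr.c): the same reduction
   applied to host integers, assuming a 5-bit field held in a 32-bit int.
   The unsigned case masks with (1 << prec) - 1, matching the mask built
   above; the signed case mimics the LSHIFT_EXPR/RSHIFT_EXPR pair, using
   an arithmetic right shift to propagate the field's sign bit.  */
#if 0
static unsigned int
reduce_unsigned_5 (unsigned int x)
{
  return x & ((1u << 5) - 1);           /* keep the low five bits */
}

static int
reduce_signed_5 (int x)
{
  int shift = 32 - 5;                   /* assumes a 32-bit int */
  return (x << shift) >> shift;         /* assumes arithmetic right shift */
}
#endif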
 
8593
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8594
   when applied to the address of EXP produces an address known to be
8595
   aligned more than BIGGEST_ALIGNMENT.  */
8596
 
8597
static int
8598
is_aligning_offset (tree offset, tree exp)
8599
{
8600
  /* Strip off any conversions.  */
8601
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
8602
         || TREE_CODE (offset) == NOP_EXPR
8603
         || TREE_CODE (offset) == CONVERT_EXPR)
8604
    offset = TREE_OPERAND (offset, 0);
8605
 
8606
  /* We must now have a BIT_AND_EXPR with a constant that is one less than
8607
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
8608
  if (TREE_CODE (offset) != BIT_AND_EXPR
8609
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
8610
      || compare_tree_int (TREE_OPERAND (offset, 1),
8611
                           BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8612
      || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8613
    return 0;
8614
 
8615
  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8616
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
8617
  offset = TREE_OPERAND (offset, 0);
8618
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
8619
         || TREE_CODE (offset) == NOP_EXPR
8620
         || TREE_CODE (offset) == CONVERT_EXPR)
8621
    offset = TREE_OPERAND (offset, 0);
8622
 
8623
  if (TREE_CODE (offset) != NEGATE_EXPR)
8624
    return 0;
8625
 
8626
  offset = TREE_OPERAND (offset, 0);
8627
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
8628
         || TREE_CODE (offset) == NOP_EXPR
8629
         || TREE_CODE (offset) == CONVERT_EXPR)
8630
    offset = TREE_OPERAND (offset, 0);
8631
 
8632
  /* This must now be the address of EXP.  */
8633
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8634
}
8635
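/* Editor's illustrative sketch (not part of expr.c): the offset pattern
   recognized above has the shape (-(T) &EXP) & (N - 1), with N a power of
   two and N - 1 larger than BIGGEST_ALIGNMENT in bytes; adding such an
   offset to an address rounds it up to the next N-byte boundary.  */
#if 0
static unsigned long
aligning_offset (unsigned long addr, unsigned long n)
{
  return (0UL - addr) & (n - 1);        /* distance up to the next multiple of N */
}
#endif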
 
8636
/* Return the tree node if an ARG corresponds to a string constant or zero
8637
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
8638
   in bytes within the string that ARG is accessing.  The type of the
8639
   offset will be `sizetype'.  */
8640
 
8641
tree
8642
string_constant (tree arg, tree *ptr_offset)
8643
{
8644
  tree array, offset;
8645
  STRIP_NOPS (arg);
8646
 
8647
  if (TREE_CODE (arg) == ADDR_EXPR)
8648
    {
8649
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8650
        {
8651
          *ptr_offset = size_zero_node;
8652
          return TREE_OPERAND (arg, 0);
8653
        }
8654
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8655
        {
8656
          array = TREE_OPERAND (arg, 0);
8657
          offset = size_zero_node;
8658
        }
8659
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8660
        {
8661
          array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8662
          offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8663
          if (TREE_CODE (array) != STRING_CST
8664
              && TREE_CODE (array) != VAR_DECL)
8665
            return 0;
8666
        }
8667
      else
8668
        return 0;
8669
    }
8670
  else if (TREE_CODE (arg) == PLUS_EXPR)
8671
    {
8672
      tree arg0 = TREE_OPERAND (arg, 0);
8673
      tree arg1 = TREE_OPERAND (arg, 1);
8674
 
8675
      STRIP_NOPS (arg0);
8676
      STRIP_NOPS (arg1);
8677
 
8678
      if (TREE_CODE (arg0) == ADDR_EXPR
8679
          && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8680
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8681
        {
8682
          array = TREE_OPERAND (arg0, 0);
8683
          offset = arg1;
8684
        }
8685
      else if (TREE_CODE (arg1) == ADDR_EXPR
8686
               && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8687
                   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8688
        {
8689
          array = TREE_OPERAND (arg1, 0);
8690
          offset = arg0;
8691
        }
8692
      else
8693
        return 0;
8694
    }
8695
  else
8696
    return 0;
8697
 
8698
  if (TREE_CODE (array) == STRING_CST)
8699
    {
8700
      *ptr_offset = convert (sizetype, offset);
8701
      return array;
8702
    }
8703
  else if (TREE_CODE (array) == VAR_DECL)
8704
    {
8705
      int length;
8706
 
8707
      /* Variables initialized to string literals can be handled too.  */
8708
      if (DECL_INITIAL (array) == NULL_TREE
8709
          || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8710
        return 0;
8711
 
8712
      /* If they are read-only, non-volatile and bind locally.  */
8713
      if (! TREE_READONLY (array)
8714
          || TREE_SIDE_EFFECTS (array)
8715
          || ! targetm.binds_local_p (array))
8716
        return 0;
8717
 
8718
      /* Avoid const char foo[4] = "abcde";  */
8719
      if (DECL_SIZE_UNIT (array) == NULL_TREE
8720
          || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8721
          || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8722
          || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8723
        return 0;
8724
 
8725
      /* If the variable is bigger than the string literal, OFFSET must be constant
8726
         and within the bounds of the string literal.  */
8727
      offset = convert (sizetype, offset);
8728
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8729
          && (! host_integerp (offset, 1)
8730
              || compare_tree_int (offset, length) >= 0))
8731
        return 0;
8732
 
8733
      *ptr_offset = offset;
8734
      return DECL_INITIAL (array);
8735
    }
8736
 
8737
  return 0;
8738
}
8739
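/* Editor's note (illustrative, not part of expr.c): at the source level
   the arguments recognized above typically look like

       &"abcdef"[2]        ADDR_EXPR of an ARRAY_REF into a STRING_CST
       "abcdef" + 2        PLUS_EXPR of an ADDR_EXPR and a constant offset
       buf + 3             where `static const char buf[] = "abcdef";`

   and in each case the STRING_CST is returned with *PTR_OFFSET set to
   the byte offset into it.  */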
 
8740
/* Generate code to calculate EXP using a store-flag instruction
8741
   and return an rtx for the result.  EXP is either a comparison
8742
   or a TRUTH_NOT_EXPR whose operand is a comparison.
8743
 
8744
   If TARGET is nonzero, store the result there if convenient.
8745
 
8746
   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8747
   cheap.
8748
 
8749
   Return zero if there is no suitable set-flag instruction
8750
   available on this machine.
8751
 
8752
   Once expand_expr has been called on the arguments of the comparison,
8753
   we are committed to doing the store flag, since it is not safe to
8754
   re-evaluate the expression.  We emit the store-flag insn by calling
8755
   emit_store_flag, but only expand the arguments if we have a reason
8756
   to believe that emit_store_flag will be successful.  If we think that
8757
   it will, but it isn't, we have to simulate the store-flag with a
8758
   set/jump/set sequence.  */
8759
 
8760
static rtx
8761
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8762
{
8763
  enum rtx_code code;
8764
  tree arg0, arg1, type;
8765
  tree tem;
8766
  enum machine_mode operand_mode;
8767
  int invert = 0;
8768
  int unsignedp;
8769
  rtx op0, op1;
8770
  enum insn_code icode;
8771
  rtx subtarget = target;
8772
  rtx result, label;
8773
 
8774
  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8775
     result at the end.  We can't simply invert the test since it would
8776
     have already been inverted if it were valid.  This case occurs for
8777
     some floating-point comparisons.  */
8778
 
8779
  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8780
    invert = 1, exp = TREE_OPERAND (exp, 0);
8781
 
8782
  arg0 = TREE_OPERAND (exp, 0);
8783
  arg1 = TREE_OPERAND (exp, 1);
8784
 
8785
  /* Don't crash if the comparison was erroneous.  */
8786
  if (arg0 == error_mark_node || arg1 == error_mark_node)
8787
    return const0_rtx;
8788
 
8789
  type = TREE_TYPE (arg0);
8790
  operand_mode = TYPE_MODE (type);
8791
  unsignedp = TYPE_UNSIGNED (type);
8792
 
8793
  /* We won't bother with BLKmode store-flag operations because it would mean
8794
     passing a lot of information to emit_store_flag.  */
8795
  if (operand_mode == BLKmode)
8796
    return 0;
8797
 
8798
  /* We won't bother with store-flag operations involving function pointers
8799
     when function pointers must be canonicalized before comparisons.  */
8800
#ifdef HAVE_canonicalize_funcptr_for_compare
8801
  if (HAVE_canonicalize_funcptr_for_compare
8802
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8803
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8804
               == FUNCTION_TYPE))
8805
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8806
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8807
                  == FUNCTION_TYPE))))
8808
    return 0;
8809
#endif
8810
 
8811
  STRIP_NOPS (arg0);
8812
  STRIP_NOPS (arg1);
8813
 
8814
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
8815
     operation of some type.  Some comparisons against 1 and -1 can be
8816
     converted to comparisons with zero.  Do so here so that the tests
8817
     below will be aware that we have a comparison with zero.   These
8818
     tests will not catch constants in the first operand, but constants
8819
     are rarely passed as the first operand.  */
8820
 
8821
  switch (TREE_CODE (exp))
8822
    {
8823
    case EQ_EXPR:
8824
      code = EQ;
8825
      break;
8826
    case NE_EXPR:
8827
      code = NE;
8828
      break;
8829
    case LT_EXPR:
8830
      if (integer_onep (arg1))
8831
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8832
      else
8833
        code = unsignedp ? LTU : LT;
8834
      break;
8835
    case LE_EXPR:
8836
      if (! unsignedp && integer_all_onesp (arg1))
8837
        arg1 = integer_zero_node, code = LT;
8838
      else
8839
        code = unsignedp ? LEU : LE;
8840
      break;
8841
    case GT_EXPR:
8842
      if (! unsignedp && integer_all_onesp (arg1))
8843
        arg1 = integer_zero_node, code = GE;
8844
      else
8845
        code = unsignedp ? GTU : GT;
8846
      break;
8847
    case GE_EXPR:
8848
      if (integer_onep (arg1))
8849
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8850
      else
8851
        code = unsignedp ? GEU : GE;
8852
      break;
8853
 
8854
    case UNORDERED_EXPR:
8855
      code = UNORDERED;
8856
      break;
8857
    case ORDERED_EXPR:
8858
      code = ORDERED;
8859
      break;
8860
    case UNLT_EXPR:
8861
      code = UNLT;
8862
      break;
8863
    case UNLE_EXPR:
8864
      code = UNLE;
8865
      break;
8866
    case UNGT_EXPR:
8867
      code = UNGT;
8868
      break;
8869
    case UNGE_EXPR:
8870
      code = UNGE;
8871
      break;
8872
    case UNEQ_EXPR:
8873
      code = UNEQ;
8874
      break;
8875
    case LTGT_EXPR:
8876
      code = LTGT;
8877
      break;
8878
 
8879
    default:
8880
      gcc_unreachable ();
8881
    }
8882
 
8883
  /* Put a constant second.  */
8884
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8885
    {
8886
      tem = arg0; arg0 = arg1; arg1 = tem;
8887
      code = swap_condition (code);
8888
    }
8889
 
8890
  /* If this is an equality or inequality test of a single bit, we can
8891
     do this by shifting the bit being tested to the low-order bit and
8892
     masking the result with the constant 1.  If the condition was EQ,
8893
     we xor it with 1.  This does not require an scc insn and is faster
8894
     than an scc insn even if we have it.
8895
 
8896
     The code to make this transformation was moved into fold_single_bit_test,
8897
     so we just call into the folder and expand its result.  */
8898
 
8899
  if ((code == NE || code == EQ)
8900
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8901
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
8902
    {
8903
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
8904
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
8905
                                                arg0, arg1, type),
8906
                          target, VOIDmode, EXPAND_NORMAL);
8907
    }
8908
 
8909
  /* Now see if we are likely to be able to do this.  Return if not.  */
8910
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
8911
    return 0;
8912
 
8913
  icode = setcc_gen_code[(int) code];
8914
  if (icode == CODE_FOR_nothing
8915
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8916
    {
8917
      /* We can only do this if it is one of the special cases that
8918
         can be handled without an scc insn.  */
8919
      if ((code == LT && integer_zerop (arg1))
8920
          || (! only_cheap && code == GE && integer_zerop (arg1)))
8921
        ;
8922
      else if (! only_cheap && (code == NE || code == EQ)
8923
               && TREE_CODE (type) != REAL_TYPE
8924
               && ((abs_optab->handlers[(int) operand_mode].insn_code
8925
                    != CODE_FOR_nothing)
8926
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
8927
                       != CODE_FOR_nothing)))
8928
        ;
8929
      else
8930
        return 0;
8931
    }
8932
 
8933
  if (! get_subtarget (target)
8934
      || GET_MODE (subtarget) != operand_mode)
8935
    subtarget = 0;
8936
 
8937
  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
8938
 
8939
  if (target == 0)
8940
    target = gen_reg_rtx (mode);
8941
 
8942
  result = emit_store_flag (target, code, op0, op1,
8943
                            operand_mode, unsignedp, 1);
8944
 
8945
  if (result)
8946
    {
8947
      if (invert)
8948
        result = expand_binop (mode, xor_optab, result, const1_rtx,
8949
                               result, 0, OPTAB_LIB_WIDEN);
8950
      return result;
8951
    }
8952
 
8953
  /* If this failed, we have to do this with set/compare/jump/set code.  */
8954
  if (!REG_P (target)
8955
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8956
    target = gen_reg_rtx (GET_MODE (target));
8957
 
8958
  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8959
  result = compare_from_rtx (op0, op1, code, unsignedp,
8960
                             operand_mode, NULL_RTX);
8961
  if (GET_CODE (result) == CONST_INT)
8962
    return (((result == const0_rtx && ! invert)
8963
             || (result != const0_rtx && invert))
8964
            ? const0_rtx : const1_rtx);
8965
 
8966
  /* The code of RESULT may not match CODE if compare_from_rtx
8967
     decided to swap its operands and reverse the original code.
8968
 
8969
     We know that compare_from_rtx returns either a CONST_INT or
8970
     a new comparison code, so it is safe to just extract the
8971
     code from RESULT.  */
8972
  code = GET_CODE (result);
8973
 
8974
  label = gen_label_rtx ();
8975
  gcc_assert (bcc_gen_fctn[(int) code]);
8976
 
8977
  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8978
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8979
  emit_label (label);
8980
 
8981
  return target;
8982
}
8983
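/* Editor's illustrative sketch (not part of expr.c): the single-bit-test
   transformation delegated to fold_single_bit_test above, written out on
   host integers.  An equality test of one bit becomes a shift and mask,
   and the EQ form just xors the result with 1, so no store-flag (scc)
   instruction is needed.  */
#if 0
static int
single_bit_ne (unsigned int x)
{
  return (x >> 3) & 1;                  /* (x & 8) != 0 */
}

static int
single_bit_eq (unsigned int x)
{
  return ((x >> 3) & 1) ^ 1;            /* (x & 8) == 0 */
}
#endif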
 
8984
 
8985
/* Stubs in case we haven't got a casesi insn.  */
8986
#ifndef HAVE_casesi
8987
# define HAVE_casesi 0
8988
# define gen_casesi(a, b, c, d, e) (0)
8989
# define CODE_FOR_casesi CODE_FOR_nothing
8990
#endif
8991
 
8992
/* If the machine does not have a case insn that compares the bounds,
8993
   this means extra overhead for dispatch tables, which raises the
8994
   threshold for using them.  */
8995
#ifndef CASE_VALUES_THRESHOLD
8996
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
8997
#endif /* CASE_VALUES_THRESHOLD */
8998
 
8999
unsigned int
9000
case_values_threshold (void)
9001
{
9002
  return CASE_VALUES_THRESHOLD;
9003
}
9004
 
9005
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
9006
 
9007
int
9008
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9009
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9010
{
9011
  enum machine_mode index_mode = SImode;
9012
  int index_bits = GET_MODE_BITSIZE (index_mode);
9013
  rtx op1, op2, index;
9014
  enum machine_mode op_mode;
9015
 
9016
  if (! HAVE_casesi)
9017
    return 0;
9018
 
9019
  /* Convert the index to SImode.  */
9020
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9021
    {
9022
      enum machine_mode omode = TYPE_MODE (index_type);
9023
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9024
 
9025
      /* We must handle the endpoints in the original mode.  */
9026
      index_expr = build2 (MINUS_EXPR, index_type,
9027
                           index_expr, minval);
9028
      minval = integer_zero_node;
9029
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9030
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9031
                               omode, 1, default_label);
9032
      /* Now we can safely truncate.  */
9033
      index = convert_to_mode (index_mode, index, 0);
9034
    }
9035
  else
9036
    {
9037
      if (TYPE_MODE (index_type) != index_mode)
9038
        {
9039
          index_expr = convert (lang_hooks.types.type_for_size
9040
                                (index_bits, 0), index_expr);
9041
          index_type = TREE_TYPE (index_expr);
9042
        }
9043
 
9044
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9045
    }
9046
 
9047
  do_pending_stack_adjust ();
9048
 
9049
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9050
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9051
      (index, op_mode))
9052
    index = copy_to_mode_reg (op_mode, index);
9053
 
9054
  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9055
 
9056
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9057
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9058
                       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9059
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9060
      (op1, op_mode))
9061
    op1 = copy_to_mode_reg (op_mode, op1);
9062
 
9063
  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9064
 
9065
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9066
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9067
                       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9068
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9069
      (op2, op_mode))
9070
    op2 = copy_to_mode_reg (op_mode, op2);
9071
 
9072
  emit_jump_insn (gen_casesi (index, op1, op2,
9073
                              table_label, default_label));
9074
  return 1;
9075
}
9076
 
9077
/* Attempt to generate a tablejump instruction; same concept.  */
9078
#ifndef HAVE_tablejump
9079
#define HAVE_tablejump 0
9080
#define gen_tablejump(x, y) (0)
9081
#endif
9082
 
9083
/* Subroutine of the next function.
9084
 
9085
   INDEX is the value being switched on, with the lowest value
9086
   in the table already subtracted.
9087
   MODE is its expected mode (needed if INDEX is constant).
9088
   RANGE is the length of the jump table.
9089
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9090
 
9091
   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9092
   index value is out of range.  */
9093
 
9094
static void
9095
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9096
              rtx default_label)
9097
{
9098
  rtx temp, vector;
9099
 
9100
  if (INTVAL (range) > cfun->max_jumptable_ents)
9101
    cfun->max_jumptable_ents = INTVAL (range);
9102
 
9103
  /* Do an unsigned comparison (in the proper mode) between the index
9104
     expression and the value which represents the length of the range.
9105
     Since we just finished subtracting the lower bound of the range
9106
     from the index expression, this comparison allows us to simultaneously
9107
     check that the original index expression value is both greater than
9108
     or equal to the minimum value of the range and less than or equal to
9109
     the maximum value of the range.  */
9110
 
9111
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9112
                           default_label);
9113
 
9114
  /* If index is in range, it must fit in Pmode.
9115
     Convert to Pmode so we can index with it.  */
9116
  if (mode != Pmode)
9117
    index = convert_to_mode (Pmode, index, 1);
9118
 
9119
  /* Don't let a MEM slip through, because then INDEX that comes
9120
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9121
     and break_out_memory_refs will go to work on it and mess it up.  */
9122
#ifdef PIC_CASE_VECTOR_ADDRESS
9123
  if (flag_pic && !REG_P (index))
9124
    index = copy_to_mode_reg (Pmode, index);
9125
#endif
9126
 
9127
  /* If flag_force_addr were to affect this address
9128
     it could interfere with the tricky assumptions made
9129
     about addresses that contain label-refs,
9130
     which may be valid only very near the tablejump itself.  */
9131
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9132
     GET_MODE_SIZE, because this indicates how large insns are.  The other
9133
     uses should all be Pmode, because they are addresses.  This code
9134
     could fail if addresses and insns are not the same size.  */
9135
  index = gen_rtx_PLUS (Pmode,
9136
                        gen_rtx_MULT (Pmode, index,
9137
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9138
                        gen_rtx_LABEL_REF (Pmode, table_label));
9139
#ifdef PIC_CASE_VECTOR_ADDRESS
9140
  if (flag_pic)
9141
    index = PIC_CASE_VECTOR_ADDRESS (index);
9142
  else
9143
#endif
9144
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
9145
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
9146
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
9147
  convert_move (temp, vector, 0);
9148
 
9149
  emit_jump_insn (gen_tablejump (temp, table_label));
9150
 
9151
  /* If we are generating PIC code or if the table is PC-relative, the
9152
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
9153
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9154
    emit_barrier ();
9155
}
9156
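/* Editor's illustrative sketch (not part of expr.c): the single unsigned
   comparison emitted above checks both case bounds at once.  With the
   lower bound already subtracted, an index below LOW wraps around to a
   large unsigned value and fails the same test as an index above HIGH
   (overflow corner cases ignored for illustration).  */
#if 0
static int
in_case_range (int i, int low, int high)
{
  return (unsigned int) (i - low) <= (unsigned int) (high - low);
}
#endif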
 
9157
int
9158
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9159
               rtx table_label, rtx default_label)
9160
{
9161
  rtx index;
9162
 
9163
  if (! HAVE_tablejump)
9164
    return 0;
9165
 
9166
  index_expr = fold_build2 (MINUS_EXPR, index_type,
9167
                            convert (index_type, index_expr),
9168
                            convert (index_type, minval));
9169
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9170
  do_pending_stack_adjust ();
9171
 
9172
  do_tablejump (index, TYPE_MODE (index_type),
9173
                convert_modes (TYPE_MODE (index_type),
9174
                               TYPE_MODE (TREE_TYPE (range)),
9175
                               expand_expr (range, NULL_RTX,
9176
                                            VOIDmode, 0),
9177
                               TYPE_UNSIGNED (TREE_TYPE (range))),
9178
                table_label, default_label);
9179
  return 1;
9180
}
9181
 
9182
/* Nonzero if the mode is a valid vector mode for this architecture.
9183
   This returns nonzero even if there is no hardware support for the
9184
   vector mode, but we can emulate with narrower modes.  */
9185
 
9186
int
9187
vector_mode_valid_p (enum machine_mode mode)
9188
{
9189
  enum mode_class class = GET_MODE_CLASS (mode);
9190
  enum machine_mode innermode;
9191
 
9192
  /* Doh!  What's going on?  */
9193
  if (class != MODE_VECTOR_INT
9194
      && class != MODE_VECTOR_FLOAT)
9195
    return 0;
9196
 
9197
  /* Hardware support.  Woo hoo!  */
9198
  if (targetm.vector_mode_supported_p (mode))
9199
    return 1;
9200
 
9201
  innermode = GET_MODE_INNER (mode);
9202
 
9203
  /* We should probably return 1 if requesting V4DI and we have no DI,
9204
     but do have V2DI, though that case is probably very unlikely.  */
9205
 
9206
  /* If we have support for the inner mode, we can safely emulate it.
9207
     We may not have V2DI, but we can emulate with a pair of DIs.  */
9208
  return targetm.scalar_mode_supported_p (innermode);
9209
}
9210
 
9211
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
9212
static rtx
9213
const_vector_from_tree (tree exp)
9214
{
9215
  rtvec v;
9216
  int units, i;
9217
  tree link, elt;
9218
  enum machine_mode inner, mode;
9219
 
9220
  mode = TYPE_MODE (TREE_TYPE (exp));
9221
 
9222
  if (initializer_zerop (exp))
9223
    return CONST0_RTX (mode);
9224
 
9225
  units = GET_MODE_NUNITS (mode);
9226
  inner = GET_MODE_INNER (mode);
9227
 
9228
  v = rtvec_alloc (units);
9229
 
9230
  link = TREE_VECTOR_CST_ELTS (exp);
9231
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
9232
    {
9233
      elt = TREE_VALUE (link);
9234
 
9235
      if (TREE_CODE (elt) == REAL_CST)
9236
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9237
                                                         inner);
9238
      else
9239
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9240
                                               TREE_INT_CST_HIGH (elt),
9241
                                               inner);
9242
    }
9243
 
9244
  /* Initialize remaining elements to 0.  */
9245
  for (; i < units; ++i)
9246
    RTVEC_ELT (v, i) = CONST0_RTX (inner);
9247
 
9248
  return gen_rtx_CONST_VECTOR (mode, v);
9249
}
9250
#include "gt-expr.h"
