/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "common/common-target.h"
#include "timevar.h"
#include "df.h"
#include "diagnostic.h"
#include "ssaexpand.h"
#include "target-globals.h"
#include "params.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED      /* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif


/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
                        unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                        enum machine_mode,
                        tree, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, enum machine_mode);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
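
/* Editorial illustration (not part of the original file): on a
   hypothetical 64-bit target with MOVE_MAX_PIECES == 8 and a
   MOVE_RATIO of 4, copying a 16-byte, 8-byte-aligned struct needs
   two DImode moves, so MOVE_BY_PIECES_P (16, 64) is true and the
   copy is expanded inline instead of calling memcpy.  A 64-byte copy
   would need eight moves, the test fails, and emit_block_move falls
   back to a movmem pattern or a library call.  */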

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            SET_REGNO (reg, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if (insn_operand_matches (ic, 1, mem))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));


  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_PRECISION (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */                     /* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)
                  != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (convert_optab_handler (sext_optab, full_mode,
                                                 from_mode),
                          to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode),
                      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

   /* Make sure both are fixed-point modes or both are not.  */
   gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
               ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
   if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
         we won't saturate the result.
         Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
          && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
        expand_fixed_convert (to, from, 0, 0);
      else
        expand_fixed_convert (to, from, 0, 1);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          rtx word_to = gen_reg_rtx (word_mode);
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_clobber (to);
            }
          convert_move (word_to, from, unsignedp);
          emit_unop_insn (code, to, word_to, equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        fill_value = emit_store_flag (gen_reg_rtx (word_mode),
                                      LT, lowfrom, const0_rtx,
                                      VOIDmode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          int shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = (GET_MODE_PRECISION (to_mode)
                          - GET_MODE_PRECISION (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
                             from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
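
/* Editorial illustration (not part of the original file): assuming a
   target that defines an extendqisi2 pattern, convert_move with an
   SImode REG destination, a QImode REG source and UNSIGNEDP == 0 is
   handled by the "Handle extension" arm above: can_extend_p returns
   that pattern's code and a single sign-extend insn is emitted.
   Without such a pattern (and no usable intermediate mode), the
   shift fallback emits an ashift by 24 followed by an arithmetic
   shift right by 24.  */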

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && CONST_INT_P (x) && INTVAL (x) < 0)
    {
      double_int val = uhwi_to_double_int (INTVAL (x));

      /* We need to zero extend VAL.  */
      if (oldmode != VOIDmode)
        val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));

      return immed_double_int_const (val, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((CONST_INT_P (x)
       && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION_MODES_P (mode,
                                                            GET_MODE (x))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.   This may not be
         the case, but it's the best we can do.  */
      if (CONST_INT_P (x) && oldmode != VOIDmode
          && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= GET_MODE_MASK (oldmode);
          if (! unsignedp
              && val_signbit_known_set_p (oldmode, val))
            val |= ~GET_MODE_MASK (oldmode);

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting from an integer constant into MODE is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
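
/* Editorial illustration (not part of the original file): a caller
   holding an SImode register REG that carries a QImode value can ask
   for the narrow view with

       rtx narrow = convert_modes (QImode, SImode, reg, 1);

   which, for a pseudo register on a target where the truncation is a
   no-op, reduces to gen_lowpart; widening a CONST_INT instead goes
   through the gen_int_mode path above.  */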

/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  enum machine_mode tmode;

  tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > max_pieces
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}

/* Return the widest integer mode strictly narrower than SIZE (callers
   pass one more than the number of bytes they are willing to move).
   If no such mode can be found, return VOIDmode.  */

static enum machine_mode
widest_int_mode_for_size (unsigned int size)
{
  enum machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < size)
      mode = tmode;

  return mode;
}
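
/* Editorial illustration (not part of the original file): with the
   usual QI/HI/SI/DImode integer modes available,
   widest_int_mode_for_size (5) is SImode (4 bytes < 5) and
   widest_int_mode_for_size (4) is HImode, which is why callers such
   as move_by_pieces start their loops with max_size set to one past
   the number of bytes they are actually willing to move at once.  */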

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces_d data;
  enum machine_mode to_addr_mode, from_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr_mode = VOIDmode;
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...
         MODE might not be used depending on the definitions of the
         USE_* macros below.  */
      enum machine_mode mode ATTRIBUTE_UNUSED
        = widest_int_mode_for_size (max_size);

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_to_mode_reg (from_addr_mode,
                                             plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_to_mode_reg (to_addr_mode,
                                           plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_to_mode_reg (to_addr_mode,
                                                 plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
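
/* Editorial illustration (not part of the original file): a builtin
   expander inlining a call such as mempcpy (dst, src, 10) could use

       rtx end = move_by_pieces (dst_mem, src_mem, 10, align, 1);

   and take END as the return value (the address one past the last
   byte written); ENDP == 2 would give the stpcpy-style address of
   the last byte instead, and ENDP == 0 simply returns TO.  DST_MEM
   and SRC_MEM are placeholder MEMs here.  */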

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1)
    {
      enum machine_mode mode;
      enum insn_code icode;

      mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
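
/* Editorial illustration (not part of the original file): on a
   typical 32-bit target (MOVE_MAX_PIECES == 4, QI/HI/SImode moves
   available), move_by_pieces_ninsns (7, 32, 5) counts one SImode
   move (7 / 4), one HImode move for the remaining 3 / 2 bytes and
   one QImode move for the last byte, so it returns 3.  */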

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
                       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  gcc_assert (size);
  if (CONST_INT_P (size)
      && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, INTVAL (size));
      set_mem_size (y, INTVAL (size));
    }

  if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
                                       expected_align, expected_size))
    ;
  else if (may_use_call
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    {
      /* Since x and y are passed to a libcall, mark the corresponding
         tree EXPR as addressable.  */
      tree y_expr = MEM_EXPR (y);
      tree x_expr = MEM_EXPR (x);
      if (y_expr)
        mark_addressable (y_expr);
      if (x_expr)
        mark_addressable (x_expr);
      retval = emit_block_move_via_libcall (x, y, size,
                                            method == BLOCK_OP_TAILCALL);
    }

  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
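
/* Editorial illustration (not part of the original file): a typical
   caller expanding an aggregate assignment has two BLKmode MEMs and
   a size rtx and simply writes

       emit_block_move (target_mem, source_mem, GEN_INT (size),
                        BLOCK_OP_NORMAL);

   letting the dispatch above choose between move_by_pieces, a movmem
   pattern, a memcpy libcall and the byte-copy loop.  TARGET_MEM and
   SOURCE_MEM are placeholders here.  */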

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = targetm.calls.function_arg (args_so_far, mode,
                                              NULL_TREE, true);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
          return false;
        targetm.calls.function_arg_advance (args_so_far, mode,
                                            NULL_TREE, true);
      }
  }
  return true;
}
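
/* Editorial illustration (not part of the original file): the check
   above matters when a struct argument is being copied into its
   outgoing stack slot (BLOCK_OP_CALL_PARM).  On a push-args target
   the memcpy call is harmless, but on a target that would place any
   of memcpy's own arguments in stack slots the call could overwrite
   the partially pushed argument list, so emit_block_move_hints must
   fall back to an inline copy instead.  */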

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
                            unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
        {
          struct expand_operand ops[6];
          unsigned int nops;

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */
          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6);

          create_fixed_operand (&ops[0], x);
          create_fixed_operand (&ops[1], y);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[2], size, mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
          if (nops == 6)
            {
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
            }
          if (maybe_expand_insn (code, nops, ops))
            {
              volatile_ok = save_volatile_ok;
              return true;
            }
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
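
/* Editorial illustration (not part of the original file): a backend
   that wants this optab used provides a named pattern such as
   "movmemsi" whose operands match the ones built above: operand 0
   the destination MEM, operand 1 the source MEM, operand 2 the byte
   count in the pattern's mode, operand 3 the shared alignment in
   bytes, and optionally operands 4 and 5 carrying the expected
   alignment and expected size hints.  */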

/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode x_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
  enum machine_mode y_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}
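
/* Editorial illustration (not part of the original file): the RTL
   emitted above corresponds roughly to this C loop, with ITER held
   in a pseudo of word_mode when SIZE has no mode of its own:

       for (iter = 0; iter < size; iter++)
         ((char *) x)[iter] = ((char *) y)[iter];

   i.e. a bottom-tested byte-copy loop entered through the initial
   jump to cmp_label.  */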
1482
 
1483
/* Copy all or part of a value X into registers starting at REGNO.
1484
   The number of registers to be filled is NREGS.  */
1485
 
1486
void
1487
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1488
{
1489
  int i;
1490
#ifdef HAVE_load_multiple
1491
  rtx pat;
1492
  rtx last;
1493
#endif
1494
 
1495
  if (nregs == 0)
1496
    return;
1497
 
1498
  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1499
    x = validize_mem (force_const_mem (mode, x));
1500
 
1501
  /* See if the machine can do this with a load multiple insn.  */
1502
#ifdef HAVE_load_multiple
1503
  if (HAVE_load_multiple)
1504
    {
1505
      last = get_last_insn ();
1506
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1507
                               GEN_INT (nregs));
1508
      if (pat)
1509
        {
1510
          emit_insn (pat);
1511
          return;
1512
        }
1513
      else
1514
        delete_insns_since (last);
1515
    }
1516
#endif
1517
 
1518
  for (i = 0; i < nregs; i++)
1519
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1520
                    operand_subword_force (x, i, mode));
1521
}
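/* A minimal usage sketch (hedged; register number 3 and the DImode
   operand are invented for illustration): spreading a DImode value X
   over two consecutive word-sized hard registers starting at r3:

       move_block_to_reg (3, x, 2, DImode);

   On targets providing a load_multiple pattern this can become a
   single insn; otherwise it falls back to the per-word emit_move_insn
   loop above.  */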
1522
 
1523
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1524
   The number of registers to be filled is NREGS.  */
1525
 
1526
void
1527
move_block_from_reg (int regno, rtx x, int nregs)
1528
{
1529
  int i;
1530
 
1531
  if (nregs == 0)
1532
    return;
1533
 
1534
  /* See if the machine can do this with a store multiple insn.  */
1535
#ifdef HAVE_store_multiple
1536
  if (HAVE_store_multiple)
1537
    {
1538
      rtx last = get_last_insn ();
1539
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1540
                                    GEN_INT (nregs));
1541
      if (pat)
1542
        {
1543
          emit_insn (pat);
1544
          return;
1545
        }
1546
      else
1547
        delete_insns_since (last);
1548
    }
1549
#endif
1550
 
1551
  for (i = 0; i < nregs; i++)
1552
    {
1553
      rtx tem = operand_subword (x, i, 1, BLKmode);
1554
 
1555
      gcc_assert (tem);
1556
 
1557
      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1558
    }
1559
}
1560
 
1561
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1562
   ORIG, where ORIG is a non-consecutive group of registers represented by
1563
   a PARALLEL.  The clone is identical to the original except that the
1564
   original set of registers is replaced by a new set of pseudo registers.
1565
   The new set has the same modes as the original set.  */
1566
 
1567
rtx
1568
gen_group_rtx (rtx orig)
1569
{
1570
  int i, length;
1571
  rtx *tmps;
1572
 
1573
  gcc_assert (GET_CODE (orig) == PARALLEL);
1574
 
1575
  length = XVECLEN (orig, 0);
1576
  tmps = XALLOCAVEC (rtx, length);
1577
 
1578
  /* Skip a NULL entry in first slot.  */
1579
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1580
 
1581
  if (i)
1582
    tmps[0] = 0;
1583
 
1584
  for (; i < length; i++)
1585
    {
1586
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1587
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1588
 
1589
      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1590
    }
1591
 
1592
  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1593
}
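/* Schematically, the non-consecutive group handled here looks like the
   following (an invented example of a 16-byte value split over two
   DImode registers; register numbers are purely illustrative):

       (parallel [(expr_list (reg:DI 3) (const_int 0))
                  (expr_list (reg:DI 4) (const_int 8))])

   gen_group_rtx returns the same shape with fresh pseudos in place of
   the hard registers, keeping the modes and byte offsets.  */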
1594
 
1595
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
1596
   except that values are placed in TMPS[i], and must later be moved
1597
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
1598
 
1599
static void
1600
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1601
{
1602
  rtx src;
1603
  int start, i;
1604
  enum machine_mode m = GET_MODE (orig_src);
1605
 
1606
  gcc_assert (GET_CODE (dst) == PARALLEL);
1607
 
1608
  if (m != VOIDmode
1609
      && !SCALAR_INT_MODE_P (m)
1610
      && !MEM_P (orig_src)
1611
      && GET_CODE (orig_src) != CONCAT)
1612
    {
1613
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1614
      if (imode == BLKmode)
1615
        src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1616
      else
1617
        src = gen_reg_rtx (imode);
1618
      if (imode != BLKmode)
1619
        src = gen_lowpart (GET_MODE (orig_src), src);
1620
      emit_move_insn (src, orig_src);
1621
      /* ...and back again.  */
1622
      if (imode != BLKmode)
1623
        src = gen_lowpart (imode, src);
1624
      emit_group_load_1 (tmps, dst, src, type, ssize);
1625
      return;
1626
    }
1627
 
1628
  /* Check for a NULL entry, used to indicate that the parameter goes
1629
     both on the stack and in registers.  */
1630
  if (XEXP (XVECEXP (dst, 0, 0), 0))
1631
    start = 0;
1632
  else
1633
    start = 1;
1634
 
1635
  /* Process the pieces.  */
1636
  for (i = start; i < XVECLEN (dst, 0); i++)
1637
    {
1638
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1639
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1640
      unsigned int bytelen = GET_MODE_SIZE (mode);
1641
      int shift = 0;
1642
 
1643
      /* Handle trailing fragments that run over the size of the struct.  */
1644
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1645
        {
1646
          /* Arrange to shift the fragment to where it belongs.
1647
             extract_bit_field loads to the lsb of the reg.  */
1648
          if (
1649
#ifdef BLOCK_REG_PADDING
1650
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1651
              == (BYTES_BIG_ENDIAN ? upward : downward)
1652
#else
1653
              BYTES_BIG_ENDIAN
1654
#endif
1655
              )
1656
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1657
          bytelen = ssize - bytepos;
1658
          gcc_assert (bytelen > 0);
1659
        }
1660
 
1661
      /* If we won't be loading directly from memory, protect the real source
1662
         from strange tricks we might play; but make sure that the source can
1663
         be loaded directly into the destination.  */
1664
      src = orig_src;
1665
      if (!MEM_P (orig_src)
1666
          && (!CONSTANT_P (orig_src)
1667
              || (GET_MODE (orig_src) != mode
1668
                  && GET_MODE (orig_src) != VOIDmode)))
1669
        {
1670
          if (GET_MODE (orig_src) == VOIDmode)
1671
            src = gen_reg_rtx (mode);
1672
          else
1673
            src = gen_reg_rtx (GET_MODE (orig_src));
1674
 
1675
          emit_move_insn (src, orig_src);
1676
        }
1677
 
1678
      /* Optimize the access just a bit.  */
1679
      if (MEM_P (src)
1680
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1681
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1682
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1683
          && bytelen == GET_MODE_SIZE (mode))
1684
        {
1685
          tmps[i] = gen_reg_rtx (mode);
1686
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1687
        }
1688
      else if (COMPLEX_MODE_P (mode)
1689
               && GET_MODE (src) == mode
1690
               && bytelen == GET_MODE_SIZE (mode))
1691
        /* Let emit_move_complex do the bulk of the work.  */
1692
        tmps[i] = src;
1693
      else if (GET_CODE (src) == CONCAT)
1694
        {
1695
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1696
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1697
 
1698
          if ((bytepos == 0 && bytelen == slen0)
1699
              || (bytepos != 0 && bytepos + bytelen <= slen))
1700
            {
1701
              /* The following assumes that the concatenated objects all
1702
                 have the same size.  In this case, a simple calculation
1703
                 can be used to determine the object and the bit field
1704
                 to be extracted.  */
1705
              tmps[i] = XEXP (src, bytepos / slen0);
1706
              if (! CONSTANT_P (tmps[i])
1707
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1708
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1709
                                             (bytepos % slen0) * BITS_PER_UNIT,
1710
                                             1, false, NULL_RTX, mode, mode);
1711
            }
1712
          else
1713
            {
1714
              rtx mem;
1715
 
1716
              gcc_assert (!bytepos);
1717
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
1718
              emit_move_insn (mem, src);
1719
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1720
                                           0, 1, false, NULL_RTX, mode, mode);
1721
            }
1722
        }
1723
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1724
         SIMD register, which is currently broken.  While we get GCC
1725
         to emit proper RTL for these cases, let's dump to memory.  */
1726
      else if (VECTOR_MODE_P (GET_MODE (dst))
1727
               && REG_P (src))
1728
        {
1729
          int slen = GET_MODE_SIZE (GET_MODE (src));
1730
          rtx mem;
1731
 
1732
          mem = assign_stack_temp (GET_MODE (src), slen, 0);
1733
          emit_move_insn (mem, src);
1734
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
1735
        }
1736
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1737
               && XVECLEN (dst, 0) > 1)
1738
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1739
      else if (CONSTANT_P (src))
1740
        {
1741
          HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1742
 
1743
          if (len == ssize)
1744
            tmps[i] = src;
1745
          else
1746
            {
1747
              rtx first, second;
1748
 
1749
              gcc_assert (2 * len == ssize);
1750
              split_double (src, &first, &second);
1751
              if (i)
1752
                tmps[i] = second;
1753
              else
1754
                tmps[i] = first;
1755
            }
1756
        }
1757
      else if (REG_P (src) && GET_MODE (src) == mode)
1758
        tmps[i] = src;
1759
      else
1760
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1761
                                     bytepos * BITS_PER_UNIT, 1, false, NULL_RTX,
1762
                                     mode, mode);
1763
 
1764
      if (shift)
1765
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1766
                                shift, tmps[i], 0);
1767
    }
1768
}
1769
 
1770
/* Emit code to move a block SRC of type TYPE to a block DST,
1771
   where DST is non-consecutive registers represented by a PARALLEL.
1772
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1773
   if not known.  */
1774
 
1775
void
1776
emit_group_load (rtx dst, rtx src, tree type, int ssize)
1777
{
1778
  rtx *tmps;
1779
  int i;
1780
 
1781
  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1782
  emit_group_load_1 (tmps, dst, src, type, ssize);
1783
 
1784
  /* Copy the extracted pieces into the proper (probable) hard regs.  */
1785
  for (i = 0; i < XVECLEN (dst, 0); i++)
1786
    {
1787
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1788
      if (d == NULL)
1789
        continue;
1790
      emit_move_insn (d, tmps[i]);
1791
    }
1792
}
1793
 
1794
/* Similar, but load SRC into new pseudos in a format that looks like
1795
   PARALLEL.  This can later be fed to emit_group_move to get things
1796
   in the right place.  */
1797
 
1798
rtx
1799
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1800
{
1801
  rtvec vec;
1802
  int i;
1803
 
1804
  vec = rtvec_alloc (XVECLEN (parallel, 0));
1805
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1806
 
1807
  /* Convert the vector to look just like the original PARALLEL, except
1808
     with the computed values.  */
1809
  for (i = 0; i < XVECLEN (parallel, 0); i++)
1810
    {
1811
      rtx e = XVECEXP (parallel, 0, i);
1812
      rtx d = XEXP (e, 0);
1813
 
1814
      if (d)
1815
        {
1816
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1817
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1818
        }
1819
      RTVEC_ELT (vec, i) = e;
1820
    }
1821
 
1822
  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1823
}
1824
 
1825
/* Emit code to move a block SRC to block DST, where SRC and DST are
1826
   non-consecutive groups of registers, each represented by a PARALLEL.  */
1827
 
1828
void
1829
emit_group_move (rtx dst, rtx src)
1830
{
1831
  int i;
1832
 
1833
  gcc_assert (GET_CODE (src) == PARALLEL
1834
              && GET_CODE (dst) == PARALLEL
1835
              && XVECLEN (src, 0) == XVECLEN (dst, 0));
1836
 
1837
  /* Skip first entry if NULL.  */
1838
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1839
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1840
                    XEXP (XVECEXP (src, 0, i), 0));
1841
}
1842
 
1843
/* Move a group of registers represented by a PARALLEL into pseudos.  */
1844
 
1845
rtx
1846
emit_group_move_into_temps (rtx src)
1847
{
1848
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1849
  int i;
1850
 
1851
  for (i = 0; i < XVECLEN (src, 0); i++)
1852
    {
1853
      rtx e = XVECEXP (src, 0, i);
1854
      rtx d = XEXP (e, 0);
1855
 
1856
      if (d)
1857
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1858
      RTVEC_ELT (vec, i) = e;
1859
    }
1860
 
1861
  return gen_rtx_PARALLEL (GET_MODE (src), vec);
1862
}
1863
 
1864
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1865
   where SRC is non-consecutive registers represented by a PARALLEL.
1866
   SSIZE represents the total size of block ORIG_DST, or -1 if not
1867
   known.  */
1868
 
1869
void
1870
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1871
{
1872
  rtx *tmps, dst;
1873
  int start, finish, i;
1874
  enum machine_mode m = GET_MODE (orig_dst);
1875
 
1876
  gcc_assert (GET_CODE (src) == PARALLEL);
1877
 
1878
  if (!SCALAR_INT_MODE_P (m)
1879
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1880
    {
1881
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1882
      if (imode == BLKmode)
1883
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1884
      else
1885
        dst = gen_reg_rtx (imode);
1886
      emit_group_store (dst, src, type, ssize);
1887
      if (imode != BLKmode)
1888
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
1889
      emit_move_insn (orig_dst, dst);
1890
      return;
1891
    }
1892
 
1893
  /* Check for a NULL entry, used to indicate that the parameter goes
1894
     both on the stack and in registers.  */
1895
  if (XEXP (XVECEXP (src, 0, 0), 0))
1896
    start = 0;
1897
  else
1898
    start = 1;
1899
  finish = XVECLEN (src, 0);
1900
 
1901
  tmps = XALLOCAVEC (rtx, finish);
1902
 
1903
  /* Copy the (probable) hard regs into pseudos.  */
1904
  for (i = start; i < finish; i++)
1905
    {
1906
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1907
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1908
        {
1909
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
1910
          emit_move_insn (tmps[i], reg);
1911
        }
1912
      else
1913
        tmps[i] = reg;
1914
    }
1915
 
1916
  /* If we won't be storing directly into memory, protect the real destination
1917
     from strange tricks we might play.  */
1918
  dst = orig_dst;
1919
  if (GET_CODE (dst) == PARALLEL)
1920
    {
1921
      rtx temp;
1922
 
1923
      /* We can get a PARALLEL dst if there is a conditional expression in
1924
         a return statement.  In that case, the dst and src are the same,
1925
         so no action is necessary.  */
1926
      if (rtx_equal_p (dst, src))
1927
        return;
1928
 
1929
      /* It is unclear if we can ever reach here, but we may as well handle
1930
         it.  Allocate a temporary, and split this into a store/load to/from
1931
         the temporary.  */
1932
 
1933
      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1934
      emit_group_store (temp, src, type, ssize);
1935
      emit_group_load (dst, temp, type, ssize);
1936
      return;
1937
    }
1938
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1939
    {
1940
      enum machine_mode outer = GET_MODE (dst);
1941
      enum machine_mode inner;
1942
      HOST_WIDE_INT bytepos;
1943
      bool done = false;
1944
      rtx temp;
1945
 
1946
      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1947
        dst = gen_reg_rtx (outer);
1948
 
1949
      /* Make life a bit easier for combine.  */
1950
      /* If the first element of the vector is the low part
1951
         of the destination mode, use a paradoxical subreg to
1952
         initialize the destination.  */
1953
      if (start < finish)
1954
        {
1955
          inner = GET_MODE (tmps[start]);
1956
          bytepos = subreg_lowpart_offset (inner, outer);
1957
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1958
            {
1959
              temp = simplify_gen_subreg (outer, tmps[start],
1960
                                          inner, 0);
1961
              if (temp)
1962
                {
1963
                  emit_move_insn (dst, temp);
1964
                  done = true;
1965
                  start++;
1966
                }
1967
            }
1968
        }
1969
 
1970
      /* If the first element wasn't the low part, try the last.  */
1971
      if (!done
1972
          && start < finish - 1)
1973
        {
1974
          inner = GET_MODE (tmps[finish - 1]);
1975
          bytepos = subreg_lowpart_offset (inner, outer);
1976
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1977
            {
1978
              temp = simplify_gen_subreg (outer, tmps[finish - 1],
1979
                                          inner, 0);
1980
              if (temp)
1981
                {
1982
                  emit_move_insn (dst, temp);
1983
                  done = true;
1984
                  finish--;
1985
                }
1986
            }
1987
        }
1988
 
1989
      /* Otherwise, simply initialize the result to zero.  */
1990
      if (!done)
1991
        emit_move_insn (dst, CONST0_RTX (outer));
1992
    }
1993
 
1994
  /* Process the pieces.  */
1995
  for (i = start; i < finish; i++)
1996
    {
1997
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1998
      enum machine_mode mode = GET_MODE (tmps[i]);
1999
      unsigned int bytelen = GET_MODE_SIZE (mode);
2000
      unsigned int adj_bytelen = bytelen;
2001
      rtx dest = dst;
2002
 
2003
      /* Handle trailing fragments that run over the size of the struct.  */
2004
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2005
        adj_bytelen = ssize - bytepos;
2006
 
2007
      if (GET_CODE (dst) == CONCAT)
2008
        {
2009
          if (bytepos + adj_bytelen
2010
              <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2011
            dest = XEXP (dst, 0);
2012
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2013
            {
2014
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2015
              dest = XEXP (dst, 1);
2016
            }
2017
          else
2018
            {
2019
              enum machine_mode dest_mode = GET_MODE (dest);
2020
              enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2021
 
2022
              gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2023
 
2024
              if (GET_MODE_ALIGNMENT (dest_mode)
2025
                  >= GET_MODE_ALIGNMENT (tmp_mode))
2026
                {
2027
                  dest = assign_stack_temp (dest_mode,
2028
                                            GET_MODE_SIZE (dest_mode),
2029
                                            0);
2030
                  emit_move_insn (adjust_address (dest,
2031
                                                  tmp_mode,
2032
                                                  bytepos),
2033
                                  tmps[i]);
2034
                  dst = dest;
2035
                }
2036
              else
2037
                {
2038
                  dest = assign_stack_temp (tmp_mode,
2039
                                            GET_MODE_SIZE (tmp_mode),
2040
                                            0);
2041
                  emit_move_insn (dest, tmps[i]);
2042
                  dst = adjust_address (dest, dest_mode, bytepos);
2043
                }
2044
              break;
2045
            }
2046
        }
2047
 
2048
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2049
        {
2050
          /* store_bit_field always takes its value from the lsb.
2051
             Move the fragment to the lsb if it's not already there.  */
2052
          if (
2053
#ifdef BLOCK_REG_PADDING
2054
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2055
              == (BYTES_BIG_ENDIAN ? upward : downward)
2056
#else
2057
              BYTES_BIG_ENDIAN
2058
#endif
2059
              )
2060
            {
2061
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2062
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2063
                                      shift, tmps[i], 0);
2064
            }
2065
          bytelen = adj_bytelen;
2066
        }
2067
 
2068
      /* Optimize the access just a bit.  */
2069
      if (MEM_P (dest)
2070
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2071
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2072
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2073
          && bytelen == GET_MODE_SIZE (mode))
2074
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2075
      else
2076
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2077
                         0, 0, mode, tmps[i]);
2078
    }
2079
 
2080
  /* Copy from the pseudo into the (probable) hard reg.  */
2081
  if (orig_dst != dst)
2082
    emit_move_insn (orig_dst, dst);
2083
}
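/* Hedged sketch of the usual calling pattern for the group routines
   above; SRC_MEM, DST_MEM, TYPE and the 16-byte size are stand-ins
   invented for the example:

       rtx regs = gen_rtx_PARALLEL
         (BLKmode,
          gen_rtvec (2,
                     gen_rtx_EXPR_LIST (VOIDmode,
                                        gen_rtx_REG (DImode, 3), const0_rtx),
                     gen_rtx_EXPR_LIST (VOIDmode,
                                        gen_rtx_REG (DImode, 4), GEN_INT (8))));
       emit_group_load (regs, src_mem, type, 16);   -- registers <- memory
       emit_group_store (dst_mem, regs, type, 16);  -- memory <- registers
*/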
2084
 
2085
/* Generate code to copy a BLKmode object of TYPE out of a
2086
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
2087
   is null, a stack temporary is created.  TGTBLK is returned.
2088
 
2089
   The purpose of this routine is to handle functions that return
2090
   BLKmode structures in registers.  Some machines (the PA for example)
2091
   want to return all small structures in registers regardless of the
2092
   structure's alignment.  */
2093
 
2094
rtx
2095
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2096
{
2097
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2098
  rtx src = NULL, dst = NULL;
2099
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2100
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2101
  enum machine_mode copy_mode;
2102
 
2103
  if (tgtblk == 0)
2104
    {
2105
      tgtblk = assign_temp (build_qualified_type (type,
2106
                                                  (TYPE_QUALS (type)
2107
                                                   | TYPE_QUAL_CONST)),
2108
                            0, 1, 1);
2109
      preserve_temp_slots (tgtblk);
2110
    }
2111
 
2112
  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2113
     into a new pseudo which is a full word.  */
2114
 
2115
  if (GET_MODE (srcreg) != BLKmode
2116
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2117
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2118
 
2119
  /* If the structure doesn't take up a whole number of words, see whether
2120
     SRCREG is padded on the left or on the right.  If it's on the left,
2121
     set PADDING_CORRECTION to the number of bits to skip.
2122
 
2123
     In most ABIs, the structure will be returned at the least significant
2124
     end of the register, which translates to right padding on little-endian
2125
     targets and left padding on big-endian targets.  The opposite
2126
     holds if the structure is returned at the most significant
2127
     end of the register.  */
2128
  if (bytes % UNITS_PER_WORD != 0
2129
      && (targetm.calls.return_in_msb (type)
2130
          ? !BYTES_BIG_ENDIAN
2131
          : BYTES_BIG_ENDIAN))
2132
    padding_correction
2133
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
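  /* Worked example with illustrative numbers: for 32-bit words and a
     6-byte structure, bytes % UNITS_PER_WORD == 2, so when padding is
     on the left this skips 32 - 2 * BITS_PER_UNIT == 16 bits before
     the real data.  */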
2134
 
2135
  /* Copy the structure BITSIZE bits at a time.  If the target lives in
2136
     memory, take care of not reading/writing past its end by selecting
2137
     a copy mode suited to BITSIZE.  This should always be possible given
2138
     how it is computed.
2139
 
2140
     We could probably emit more efficient code for machines which do not use
2141
     strict alignment, but it doesn't seem worth the effort at the current
2142
     time.  */
2143
 
2144
  copy_mode = word_mode;
2145
  if (MEM_P (tgtblk))
2146
    {
2147
      enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2148
      if (mem_mode != BLKmode)
2149
        copy_mode = mem_mode;
2150
    }
2151
 
2152
  for (bitpos = 0, xbitpos = padding_correction;
2153
       bitpos < bytes * BITS_PER_UNIT;
2154
       bitpos += bitsize, xbitpos += bitsize)
2155
    {
2156
      /* We need a new source operand each time xbitpos is on a
2157
         word boundary and when xbitpos == padding_correction
2158
         (the first time through).  */
2159
      if (xbitpos % BITS_PER_WORD == 0
2160
          || xbitpos == padding_correction)
2161
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2162
                                     GET_MODE (srcreg));
2163
 
2164
      /* We need a new destination operand each time bitpos is on
2165
         a word boundary.  */
2166
      if (bitpos % BITS_PER_WORD == 0)
2167
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2168
 
2169
      /* Use xbitpos for the source extraction (right justified) and
2170
         bitpos for the destination store (left justified).  */
2171
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2172
                       extract_bit_field (src, bitsize,
2173
                                          xbitpos % BITS_PER_WORD, 1, false,
2174
                                          NULL_RTX, copy_mode, copy_mode));
2175
    }
2176
 
2177
  return tgtblk;
2178
}
2179
 
2180
/* Copy BLKmode value SRC into a register of mode MODE.  Return the
2181
   register if it contains any data, otherwise return null.
2182
 
2183
   This is used on targets that return BLKmode values in registers.  */
2184
 
2185
rtx
2186
copy_blkmode_to_reg (enum machine_mode mode, tree src)
2187
{
2188
  int i, n_regs;
2189
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2190
  unsigned int bitsize;
2191
  rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2192
  enum machine_mode dst_mode;
2193
 
2194
  gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2195
 
2196
  x = expand_normal (src);
2197
 
2198
  bytes = int_size_in_bytes (TREE_TYPE (src));
2199
  if (bytes == 0)
2200
    return NULL_RTX;
2201
 
2202
  /* If the structure doesn't take up a whole number of words, see
2203
     whether the register value should be padded on the left or on
2204
     the right.  Set PADDING_CORRECTION to the number of padding
2205
     bits needed on the left side.
2206
 
2207
     In most ABIs, the structure will be returned at the least significant
2208
     end of the register, which translates to right padding on little-endian
2209
     targets and left padding on big-endian targets.  The opposite
2210
     holds if the structure is returned at the most significant
2211
     end of the register.  */
2212
  if (bytes % UNITS_PER_WORD != 0
2213
      && (targetm.calls.return_in_msb (TREE_TYPE (src))
2214
          ? !BYTES_BIG_ENDIAN
2215
          : BYTES_BIG_ENDIAN))
2216
    padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2217
                                           * BITS_PER_UNIT));
2218
 
2219
  n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2220
  dst_words = XALLOCAVEC (rtx, n_regs);
2221
  bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2222
 
2223
  /* Copy the structure BITSIZE bits at a time.  */
2224
  for (bitpos = 0, xbitpos = padding_correction;
2225
       bitpos < bytes * BITS_PER_UNIT;
2226
       bitpos += bitsize, xbitpos += bitsize)
2227
    {
2228
      /* We need a new destination pseudo each time xbitpos is
2229
         on a word boundary and when xbitpos == padding_correction
2230
         (the first time through).  */
2231
      if (xbitpos % BITS_PER_WORD == 0
2232
          || xbitpos == padding_correction)
2233
        {
2234
          /* Generate an appropriate register.  */
2235
          dst_word = gen_reg_rtx (word_mode);
2236
          dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2237
 
2238
          /* Clear the destination before we move anything into it.  */
2239
          emit_move_insn (dst_word, CONST0_RTX (word_mode));
2240
        }
2241
 
2242
      /* We need a new source operand each time bitpos is on a word
2243
         boundary.  */
2244
      if (bitpos % BITS_PER_WORD == 0)
2245
        src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2246
 
2247
      /* Use bitpos for the source extraction (left justified) and
2248
         xbitpos for the destination store (right justified).  */
2249
      store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2250
                       0, 0, word_mode,
2251
                       extract_bit_field (src_word, bitsize,
2252
                                          bitpos % BITS_PER_WORD, 1, false,
2253
                                          NULL_RTX, word_mode, word_mode));
2254
    }
2255
 
2256
  if (mode == BLKmode)
2257
    {
2258
      /* Find the smallest integer mode large enough to hold the
2259
         entire structure.  */
2260
      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2261
           mode != VOIDmode;
2262
           mode = GET_MODE_WIDER_MODE (mode))
2263
        /* Have we found a large enough mode?  */
2264
        if (GET_MODE_SIZE (mode) >= bytes)
2265
          break;
2266
 
2267
      /* A suitable mode should have been found.  */
2268
      gcc_assert (mode != VOIDmode);
2269
    }
2270
 
2271
  if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2272
    dst_mode = word_mode;
2273
  else
2274
    dst_mode = mode;
2275
  dst = gen_reg_rtx (dst_mode);
2276
 
2277
  for (i = 0; i < n_regs; i++)
2278
    emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2279
 
2280
  if (mode != dst_mode)
2281
    dst = gen_lowpart (mode, dst);
2282
 
2283
  return dst;
2284
}
2285
 
2286
/* Add a USE expression for REG to the (possibly empty) list pointed
2287
   to by CALL_FUSAGE.  REG must denote a hard register.  */
2288
 
2289
void
2290
use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2291
{
2292
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2293
 
2294
  *call_fusage
2295
    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2296
}
2297
 
2298
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2299
   starting at REGNO.  All of these registers must be hard registers.  */
2300
 
2301
void
2302
use_regs (rtx *call_fusage, int regno, int nregs)
2303
{
2304
  int i;
2305
 
2306
  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2307
 
2308
  for (i = 0; i < nregs; i++)
2309
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
2310
}
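/* Hedged usage sketch (register number 3 is illustrative): a call that
   passes one argument in two consecutive hard registers records them
   for the call's CALL_INSN_FUNCTION_USAGE like so:

       rtx call_fusage = NULL_RTX;
       use_regs (&call_fusage, 3, 2);

   after which call_fusage holds a USE of (reg 3) and a USE of (reg 4).  */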
2311
 
2312
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2313
   PARALLEL REGS.  This is for calls that pass values in multiple
2314
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2315
 
2316
void
2317
use_group_regs (rtx *call_fusage, rtx regs)
2318
{
2319
  int i;
2320
 
2321
  for (i = 0; i < XVECLEN (regs, 0); i++)
2322
    {
2323
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2324
 
2325
      /* A NULL entry means the parameter goes both on the stack and in
2326
         registers.  This can also be a MEM for targets that pass values
2327
         partially on the stack and partially in registers.  */
2328
      if (reg != 0 && REG_P (reg))
2329
        use_reg (call_fusage, reg);
2330
    }
2331
}
2332
 
2333
/* Return the defining gimple statement for SSA_NAME NAME if it is an
2334
   assignment and the code of the expression on the RHS is CODE.  Return
2335
   NULL otherwise.  */
2336
 
2337
static gimple
2338
get_def_for_expr (tree name, enum tree_code code)
2339
{
2340
  gimple def_stmt;
2341
 
2342
  if (TREE_CODE (name) != SSA_NAME)
2343
    return NULL;
2344
 
2345
  def_stmt = get_gimple_for_ssa_name (name);
2346
  if (!def_stmt
2347
      || gimple_assign_rhs_code (def_stmt) != code)
2348
    return NULL;
2349
 
2350
  return def_stmt;
2351
}
2352
 
2353
 
2354
/* Determine whether the LEN bytes generated by CONSTFUN can be
2355
   stored to memory using several move instructions.  CONSTFUNDATA is
2356
   a pointer which will be passed as argument in every CONSTFUN call.
2357
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
2358
   a memset operation and false if it's a copy of a constant string.
2359
   Return nonzero if a call to store_by_pieces should succeed.  */
2360
 
2361
int
2362
can_store_by_pieces (unsigned HOST_WIDE_INT len,
2363
                     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2364
                     void *constfundata, unsigned int align, bool memsetp)
2365
{
2366
  unsigned HOST_WIDE_INT l;
2367
  unsigned int max_size;
2368
  HOST_WIDE_INT offset = 0;
2369
  enum machine_mode mode;
2370
  enum insn_code icode;
2371
  int reverse;
2372
  /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
2373
  rtx cst ATTRIBUTE_UNUSED;
2374
 
2375
  if (len == 0)
2376
    return 1;
2377
 
2378
  if (! (memsetp
2379
         ? SET_BY_PIECES_P (len, align)
2380
         : STORE_BY_PIECES_P (len, align)))
2381
    return 0;
2382
 
2383
  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2384
 
2385
  /* We would first store what we can in the largest integer mode, then go to
2386
     successively smaller modes.  */
2387
 
2388
  for (reverse = 0;
2389
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2390
       reverse++)
2391
    {
2392
      l = len;
2393
      max_size = STORE_MAX_PIECES + 1;
2394
      while (max_size > 1)
2395
        {
2396
          mode = widest_int_mode_for_size (max_size);
2397
 
2398
          if (mode == VOIDmode)
2399
            break;
2400
 
2401
          icode = optab_handler (mov_optab, mode);
2402
          if (icode != CODE_FOR_nothing
2403
              && align >= GET_MODE_ALIGNMENT (mode))
2404
            {
2405
              unsigned int size = GET_MODE_SIZE (mode);
2406
 
2407
              while (l >= size)
2408
                {
2409
                  if (reverse)
2410
                    offset -= size;
2411
 
2412
                  cst = (*constfun) (constfundata, offset, mode);
2413
                  if (!targetm.legitimate_constant_p (mode, cst))
2414
                    return 0;
2415
 
2416
                  if (!reverse)
2417
                    offset += size;
2418
 
2419
                  l -= size;
2420
                }
2421
            }
2422
 
2423
          max_size = GET_MODE_SIZE (mode);
2424
        }
2425
 
2426
      /* The code above should have handled everything.  */
2427
      gcc_assert (!l);
2428
    }
2429
 
2430
  return 1;
2431
}
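/* Hedged sketch of how the two entry points cooperate in a caller such
   as a memset or strcpy expander; CONSTFUN, DATA, DEST_MEM, LEN and
   ALIGN stand in for the caller's own values:

       if (can_store_by_pieces (len, constfun, data, align, true))
         store_by_pieces (dest_mem, len, constfun, data, align, true, 0);
       else
         ... fall back to a setmem pattern or a libcall ...

   can_store_by_pieces only checks that every constant piece CONSTFUN
   would produce is a legitimate immediate; it emits no code itself.  */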
2432
 
2433
/* Generate several move instructions to store LEN bytes generated by
2434
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2435
   pointer which will be passed as argument in every CONSTFUN call.
2436
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
2437
   a memset operation and false if it's a copy of a constant string.
2438
   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2439
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2440
   stpcpy.  */
2441
 
2442
rtx
2443
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2444
                 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2445
                 void *constfundata, unsigned int align, bool memsetp, int endp)
2446
{
2447
  enum machine_mode to_addr_mode
2448
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2449
  struct store_by_pieces_d data;
2450
 
2451
  if (len == 0)
2452
    {
2453
      gcc_assert (endp != 2);
2454
      return to;
2455
    }
2456
 
2457
  gcc_assert (memsetp
2458
              ? SET_BY_PIECES_P (len, align)
2459
              : STORE_BY_PIECES_P (len, align));
2460
  data.constfun = constfun;
2461
  data.constfundata = constfundata;
2462
  data.len = len;
2463
  data.to = to;
2464
  store_by_pieces_1 (&data, align);
2465
  if (endp)
2466
    {
2467
      rtx to1;
2468
 
2469
      gcc_assert (!data.reverse);
2470
      if (data.autinc_to)
2471
        {
2472
          if (endp == 2)
2473
            {
2474
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2475
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2476
              else
2477
                data.to_addr = copy_to_mode_reg (to_addr_mode,
2478
                                                 plus_constant (data.to_addr,
2479
                                                                -1));
2480
            }
2481
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2482
                                           data.offset);
2483
        }
2484
      else
2485
        {
2486
          if (endp == 2)
2487
            --data.offset;
2488
          to1 = adjust_address (data.to, QImode, data.offset);
2489
        }
2490
      return to1;
2491
    }
2492
  else
2493
    return data.to;
2494
}
2495
 
2496
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2497
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2498
 
2499
static void
2500
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2501
{
2502
  struct store_by_pieces_d data;
2503
 
2504
  if (len == 0)
2505
    return;
2506
 
2507
  data.constfun = clear_by_pieces_1;
2508
  data.constfundata = NULL;
2509
  data.len = len;
2510
  data.to = to;
2511
  store_by_pieces_1 (&data, align);
2512
}
2513
 
2514
/* Callback routine for clear_by_pieces.
2515
   Return const0_rtx unconditionally.  */
2516
 
2517
static rtx
2518
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2519
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2520
                   enum machine_mode mode ATTRIBUTE_UNUSED)
2521
{
2522
  return const0_rtx;
2523
}
2524
 
2525
/* Subroutine of clear_by_pieces and store_by_pieces.
2526
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
2527
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2528
 
2529
static void
2530
store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2531
                   unsigned int align ATTRIBUTE_UNUSED)
2532
{
2533
  enum machine_mode to_addr_mode
2534
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2535
  rtx to_addr = XEXP (data->to, 0);
2536
  unsigned int max_size = STORE_MAX_PIECES + 1;
2537
  enum insn_code icode;
2538
 
2539
  data->offset = 0;
2540
  data->to_addr = to_addr;
2541
  data->autinc_to
2542
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2543
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2544
 
2545
  data->explicit_inc_to = 0;
2546
  data->reverse
2547
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2548
  if (data->reverse)
2549
    data->offset = data->len;
2550
 
2551
  /* If storing requires more than two move insns,
2552
     copy addresses to registers (to make displacements shorter)
2553
     and use post-increment if available.  */
2554
  if (!data->autinc_to
2555
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2556
    {
2557
      /* Determine the main mode we'll be using.
2558
         MODE might not be used depending on the definitions of the
2559
         USE_* macros below.  */
2560
      enum machine_mode mode ATTRIBUTE_UNUSED
2561
        = widest_int_mode_for_size (max_size);
2562
 
2563
      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2564
        {
2565
          data->to_addr = copy_to_mode_reg (to_addr_mode,
2566
                                            plus_constant (to_addr, data->len));
2567
          data->autinc_to = 1;
2568
          data->explicit_inc_to = -1;
2569
        }
2570
 
2571
      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2572
          && ! data->autinc_to)
2573
        {
2574
          data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2575
          data->autinc_to = 1;
2576
          data->explicit_inc_to = 1;
2577
        }
2578
 
2579
      if ( !data->autinc_to && CONSTANT_P (to_addr))
2580
        data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2581
    }
2582
 
2583
  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2584
 
2585
  /* First store what we can in the largest integer mode, then go to
2586
     successively smaller modes.  */
2587
 
2588
  while (max_size > 1)
2589
    {
2590
      enum machine_mode mode = widest_int_mode_for_size (max_size);
2591
 
2592
      if (mode == VOIDmode)
2593
        break;
2594
 
2595
      icode = optab_handler (mov_optab, mode);
2596
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2597
        store_by_pieces_2 (GEN_FCN (icode), mode, data);
2598
 
2599
      max_size = GET_MODE_SIZE (mode);
2600
    }
2601
 
2602
  /* The code above should have handled everything.  */
2603
  gcc_assert (!data->len);
2604
}
2605
 
2606
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2607
   with move instructions for mode MODE.  GENFUN is the gen_... function
2608
   to make a move insn for that mode.  DATA has all the other info.  */
2609
 
2610
static void
2611
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2612
                   struct store_by_pieces_d *data)
2613
{
2614
  unsigned int size = GET_MODE_SIZE (mode);
2615
  rtx to1, cst;
2616
 
2617
  while (data->len >= size)
2618
    {
2619
      if (data->reverse)
2620
        data->offset -= size;
2621
 
2622
      if (data->autinc_to)
2623
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2624
                                         data->offset);
2625
      else
2626
        to1 = adjust_address (data->to, mode, data->offset);
2627
 
2628
      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2629
        emit_insn (gen_add2_insn (data->to_addr,
2630
                                  GEN_INT (-(HOST_WIDE_INT) size)));
2631
 
2632
      cst = (*data->constfun) (data->constfundata, data->offset, mode);
2633
      emit_insn ((*genfun) (to1, cst));
2634
 
2635
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2636
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2637
 
2638
      if (! data->reverse)
2639
        data->offset += size;
2640
 
2641
      data->len -= size;
2642
    }
2643
}
2644
 
2645
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2646
   its length in bytes.  */
2647
 
2648
rtx
2649
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2650
                     unsigned int expected_align, HOST_WIDE_INT expected_size)
2651
{
2652
  enum machine_mode mode = GET_MODE (object);
2653
  unsigned int align;
2654
 
2655
  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2656
 
2657
  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2658
     just move a zero.  Otherwise, do this a piece at a time.  */
2659
  if (mode != BLKmode
2660
      && CONST_INT_P (size)
2661
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2662
    {
2663
      rtx zero = CONST0_RTX (mode);
2664
      if (zero != NULL)
2665
        {
2666
          emit_move_insn (object, zero);
2667
          return NULL;
2668
        }
2669
 
2670
      if (COMPLEX_MODE_P (mode))
2671
        {
2672
          zero = CONST0_RTX (GET_MODE_INNER (mode));
2673
          if (zero != NULL)
2674
            {
2675
              write_complex_part (object, zero, 0);
2676
              write_complex_part (object, zero, 1);
2677
              return NULL;
2678
            }
2679
        }
2680
    }
2681
 
2682
  if (size == const0_rtx)
2683
    return NULL;
2684
 
2685
  align = MEM_ALIGN (object);
2686
 
2687
  if (CONST_INT_P (size)
2688
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
2689
    clear_by_pieces (object, INTVAL (size), align);
2690
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
2691
                                   expected_align, expected_size))
2692
    ;
2693
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2694
    return set_storage_via_libcall (object, size, const0_rtx,
2695
                                    method == BLOCK_OP_TAILCALL);
2696
  else
2697
    gcc_unreachable ();
2698
 
2699
  return NULL;
2700
}
2701
 
2702
rtx
2703
clear_storage (rtx object, rtx size, enum block_op_methods method)
2704
{
2705
  return clear_storage_hints (object, size, method, 0, -1);
2706
}
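/* Hedged usage sketch: zeroing a 64-byte BLKmode stack temporary (the
   size is invented for the example):

       rtx mem = assign_stack_temp (BLKmode, 64, 0);
       clear_storage (mem, GEN_INT (64), BLOCK_OP_NORMAL);

   clear_storage_hints then tries, in order: a plain zero move for
   suitably sized non-BLKmode objects, clear_by_pieces, a target setmem
   pattern, and finally the memset libcall.  */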
2707
 
2708
 
2709
/* A subroutine of clear_storage.  Expand a call to memset.
2710
   Return the return value of memset, 0 otherwise.  */
2711
 
2712
rtx
2713
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2714
{
2715
  tree call_expr, fn, object_tree, size_tree, val_tree;
2716
  enum machine_mode size_mode;
2717
  rtx retval;
2718
 
2719
  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
2720
     place those pseudos into a VAR_DECL and use them later.  */
2721
 
2722
  object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2723
 
2724
  size_mode = TYPE_MODE (sizetype);
2725
  size = convert_to_mode (size_mode, size, 1);
2726
  size = copy_to_mode_reg (size_mode, size);
2727
 
2728
  /* It is incorrect to use the libcall calling conventions to call
2729
     memset in this context.  This could be a user call to memset and
2730
     the user may wish to examine the return value from memset.  For
2731
     targets where libcalls and normal calls have different conventions
2732
     for returning pointers, we could end up generating incorrect code.  */
2733
 
2734
  object_tree = make_tree (ptr_type_node, object);
2735
  if (!CONST_INT_P (val))
2736
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2737
  size_tree = make_tree (sizetype, size);
2738
  val_tree = make_tree (integer_type_node, val);
2739
 
2740
  fn = clear_storage_libcall_fn (true);
2741
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2742
  CALL_EXPR_TAILCALL (call_expr) = tailcall;
2743
 
2744
  retval = expand_normal (call_expr);
2745
 
2746
  return retval;
2747
}
2748
 
2749
/* A subroutine of set_storage_via_libcall.  Create the tree node
2750
   for the function we use for block clears.  The first time FOR_CALL
2751
   is true, we call assemble_external.  */
2752
 
2753
tree block_clear_fn;
2754
 
2755
void
2756
init_block_clear_fn (const char *asmspec)
2757
{
2758
  if (!block_clear_fn)
2759
    {
2760
      tree fn, args;
2761
 
2762
      fn = get_identifier ("memset");
2763
      args = build_function_type_list (ptr_type_node, ptr_type_node,
2764
                                       integer_type_node, sizetype,
2765
                                       NULL_TREE);
2766
 
2767
      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2768
      DECL_EXTERNAL (fn) = 1;
2769
      TREE_PUBLIC (fn) = 1;
2770
      DECL_ARTIFICIAL (fn) = 1;
2771
      TREE_NOTHROW (fn) = 1;
2772
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2773
      DECL_VISIBILITY_SPECIFIED (fn) = 1;
2774
 
2775
      block_clear_fn = fn;
2776
    }
2777
 
2778
  if (asmspec)
2779
    set_user_assembler_name (block_clear_fn, asmspec);
2780
}
2781
 
2782
static tree
2783
clear_storage_libcall_fn (int for_call)
2784
{
2785
  static bool emitted_extern;
2786
 
2787
  if (!block_clear_fn)
2788
    init_block_clear_fn (NULL);
2789
 
2790
  if (for_call && !emitted_extern)
2791
    {
2792
      emitted_extern = true;
2793
      make_decl_rtl (block_clear_fn);
2794
      assemble_external (block_clear_fn);
2795
    }
2796
 
2797
  return block_clear_fn;
2798
}
2799
 
2800
/* Expand a setmem pattern; return true if successful.  */
2801
 
2802
bool
2803
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2804
                        unsigned int expected_align, HOST_WIDE_INT expected_size)
2805
{
2806
  /* Try the most limited insn first, because there's no point
2807
     including more than one in the machine description unless
2808
     the more limited one has some advantage.  */
2809
 
2810
  enum machine_mode mode;
2811
 
2812
  if (expected_align < align)
2813
    expected_align = align;
2814
 
2815
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2816
       mode = GET_MODE_WIDER_MODE (mode))
2817
    {
2818
      enum insn_code code = direct_optab_handler (setmem_optab, mode);
2819
 
2820
      if (code != CODE_FOR_nothing
2821
          /* We don't need MODE to be narrower than
2822
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2823
             the mode mask, as it is returned by the macro, it will
2824
             definitely be less than the actual mode mask.  */
2825
          && ((CONST_INT_P (size)
2826
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
2827
                   <= (GET_MODE_MASK (mode) >> 1)))
2828
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
2829
        {
2830
          struct expand_operand ops[6];
2831
          unsigned int nops;
2832
 
2833
          nops = insn_data[(int) code].n_generator_args;
2834
          gcc_assert (nops == 4 || nops == 6);
2835
 
2836
          create_fixed_operand (&ops[0], object);
2837
          /* The check above guarantees that this size conversion is valid.  */
2838
          create_convert_operand_to (&ops[1], size, mode, true);
2839
          create_convert_operand_from (&ops[2], val, byte_mode, true);
2840
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2841
          if (nops == 6)
2842
            {
2843
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2844
              create_integer_operand (&ops[5], expected_size);
2845
            }
2846
          if (maybe_expand_insn (code, nops, ops))
2847
            return true;
2848
        }
2849
    }
2850
 
2851
  return false;
2852
}
2853
 
2854
 
2855
/* Write to one of the components of the complex value CPLX.  Write VAL to
2856
   the real part if IMAG_P is false, and the imaginary part if it's true.  */
2857
 
2858
static void
2859
write_complex_part (rtx cplx, rtx val, bool imag_p)
2860
{
2861
  enum machine_mode cmode;
2862
  enum machine_mode imode;
2863
  unsigned ibitsize;
2864
 
2865
  if (GET_CODE (cplx) == CONCAT)
2866
    {
2867
      emit_move_insn (XEXP (cplx, imag_p), val);
2868
      return;
2869
    }
2870
 
2871
  cmode = GET_MODE (cplx);
2872
  imode = GET_MODE_INNER (cmode);
2873
  ibitsize = GET_MODE_BITSIZE (imode);
2874
 
2875
  /* For MEMs simplify_gen_subreg may generate an invalid new address
2876
     because, e.g., the original address is considered mode-dependent
2877
     by the target, which restricts simplify_subreg from invoking
2878
     adjust_address_nv.  Instead of preparing fallback support for an
2879
     invalid address, we call adjust_address_nv directly.  */
2880
  if (MEM_P (cplx))
2881
    {
2882
      emit_move_insn (adjust_address_nv (cplx, imode,
2883
                                         imag_p ? GET_MODE_SIZE (imode) : 0),
2884
                      val);
2885
      return;
2886
    }
2887
 
2888
  /* If the sub-object is at least word sized, then we know that subregging
2889
     will work.  This special case is important, since store_bit_field
2890
     wants to operate on integer modes, and there's rarely an OImode to
2891
     correspond to TCmode.  */
2892
  if (ibitsize >= BITS_PER_WORD
2893
      /* For hard regs we have exact predicates.  Assume we can split
2894
         the original object if it spans an even number of hard regs.
2895
         This special case is important for SCmode on 64-bit platforms
2896
         where the natural size of floating-point regs is 32-bit.  */
2897
      || (REG_P (cplx)
2898
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2899
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2900
    {
2901
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
2902
                                      imag_p ? GET_MODE_SIZE (imode) : 0);
2903
      if (part)
2904
        {
2905
          emit_move_insn (part, val);
2906
          return;
2907
        }
2908
      else
2909
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
2910
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2911
    }
2912
 
2913
  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
2914
}
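/* Hedged sketch (SCmode and the operand names are illustrative): a
   caller inside this file could assemble a complex pseudo from two
   SFmode parts with

       rtx c = gen_reg_rtx (SCmode);
       write_complex_part (c, re, false);   -- real part
       write_complex_part (c, im, true);    -- imaginary part

   and read_complex_part (c, true) recovers the imaginary half.  */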
2915
 
2916
/* Extract one of the components of the complex value CPLX.  Extract the
2917
   real part if IMAG_P is false, and the imaginary part if it's true.  */
2918
 
2919
static rtx
2920
read_complex_part (rtx cplx, bool imag_p)
2921
{
2922
  enum machine_mode cmode, imode;
2923
  unsigned ibitsize;
2924
 
2925
  if (GET_CODE (cplx) == CONCAT)
2926
    return XEXP (cplx, imag_p);
2927
 
2928
  cmode = GET_MODE (cplx);
2929
  imode = GET_MODE_INNER (cmode);
2930
  ibitsize = GET_MODE_BITSIZE (imode);
2931
 
2932
  /* Special case reads from complex constants that got spilled to memory.  */
2933
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2934
    {
2935
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2936
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
2937
        {
2938
          tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2939
          if (CONSTANT_CLASS_P (part))
2940
            return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2941
        }
2942
    }
2943
 
2944
  /* For MEMs simplify_gen_subreg may generate an invalid new address
2945
     because, e.g., the original address is considered mode-dependent
2946
     by the target, which restricts simplify_subreg from invoking
2947
     adjust_address_nv.  Instead of preparing fallback support for an
2948
     invalid address, we call adjust_address_nv directly.  */
2949
  if (MEM_P (cplx))
2950
    return adjust_address_nv (cplx, imode,
2951
                              imag_p ? GET_MODE_SIZE (imode) : 0);
2952
 
2953
  /* If the sub-object is at least word sized, then we know that subregging
2954
     will work.  This special case is important, since extract_bit_field
2955
     wants to operate on integer modes, and there's rarely an OImode to
2956
     correspond to TCmode.  */
2957
  if (ibitsize >= BITS_PER_WORD
2958
      /* For hard regs we have exact predicates.  Assume we can split
2959
         the original object if it spans an even number of hard regs.
2960
         This special case is important for SCmode on 64-bit platforms
2961
         where the natural size of floating-point regs is 32-bit.  */
2962
      || (REG_P (cplx)
2963
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2964
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2965
    {
2966
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2967
                                     imag_p ? GET_MODE_SIZE (imode) : 0);
2968
      if (ret)
2969
        return ret;
2970
      else
2971
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
2972
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2973
    }
2974
 
2975
  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2976
                            true, false, NULL_RTX, imode, imode);
2977
}
2978
 
2979
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
2980
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
2981
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
2982
   we'll force-create a SUBREG if needed.  */
2983
 
2984
static rtx
2985
emit_move_change_mode (enum machine_mode new_mode,
2986
                       enum machine_mode old_mode, rtx x, bool force)
2987
{
2988
  rtx ret;
2989
 
2990
  if (push_operand (x, GET_MODE (x)))
2991
    {
2992
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2993
      MEM_COPY_ATTRIBUTES (ret, x);
2994
    }
2995
  else if (MEM_P (x))
2996
    {
2997
      /* We don't have to worry about changing the address since the
2998
         size in bytes is supposed to be the same.  */
2999
      if (reload_in_progress)
3000
        {
3001
          /* Copy the MEM to change the mode and move any
3002
             substitutions from the old MEM to the new one.  */
3003
          ret = adjust_address_nv (x, new_mode, 0);
3004
          copy_replacements (x, ret);
3005
        }
3006
      else
3007
        ret = adjust_address (x, new_mode, 0);
3008
    }
3009
  else
3010
    {
3011
      /* Note that we do want simplify_subreg's behavior of validating
3012
         that the new mode is ok for a hard register.  If we were to use
3013
         simplify_gen_subreg, we would create the subreg, but would
3014
         probably run into the target not being able to implement it.  */
3015
      /* Except, of course, when FORCE is true, in which case this is
3016
         exactly what we want; that is needed for CCmodes on some targets.  */
3017
      if (force)
3018
        ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3019
      else
3020
        ret = simplify_subreg (new_mode, x, old_mode, 0);
3021
    }
3022
 
3023
  return ret;
3024
}
3025
 
3026
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
3027
   an integer mode of the same size as MODE.  Returns the instruction
3028
   emitted, or NULL if such a move could not be generated.  */
3029
 
3030
static rtx
3031
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3032
{
3033
  enum machine_mode imode;
3034
  enum insn_code code;
3035
 
3036
  /* There must exist a mode of the exact size we require.  */
3037
  imode = int_mode_for_mode (mode);
3038
  if (imode == BLKmode)
3039
    return NULL_RTX;
3040
 
3041
  /* The target must support moves in this mode.  */
3042
  code = optab_handler (mov_optab, imode);
3043
  if (code == CODE_FOR_nothing)
3044
    return NULL_RTX;
3045
 
3046
  x = emit_move_change_mode (imode, mode, x, force);
3047
  if (x == NULL_RTX)
3048
    return NULL_RTX;
3049
  y = emit_move_change_mode (imode, mode, y, force);
3050
  if (y == NULL_RTX)
3051
    return NULL_RTX;
3052
  return emit_insn (GEN_FCN (code) (x, y));
3053
}
3054
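
/* Editor's note: an illustrative sketch, not part of the original file.
   On a typical 32-bit target int_mode_for_mode (SFmode) is SImode, so a
   caller in this file could try the integer path roughly as

     rtx insn = emit_move_via_integer (SFmode, dest, src, false);
     if (insn == NULL_RTX)
       ... fall back to emit_move_multi_word or a by-parts strategy ...

   where DEST and SRC are hypothetical SFmode rtxes.  A NULL result means
   either no same-sized integer mode exists, the target has no mov insn
   for it, or emit_move_change_mode could not rewrite an operand.  */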
 
3055
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
3056
   Return an equivalent MEM that does not use an auto-increment.  */
3057
 
3058
static rtx
3059
emit_move_resolve_push (enum machine_mode mode, rtx x)
3060
{
3061
  enum rtx_code code = GET_CODE (XEXP (x, 0));
3062
  HOST_WIDE_INT adjust;
3063
  rtx temp;
3064
 
3065
  adjust = GET_MODE_SIZE (mode);
3066
#ifdef PUSH_ROUNDING
3067
  adjust = PUSH_ROUNDING (adjust);
3068
#endif
3069
  if (code == PRE_DEC || code == POST_DEC)
3070
    adjust = -adjust;
3071
  else if (code == PRE_MODIFY || code == POST_MODIFY)
3072
    {
3073
      rtx expr = XEXP (XEXP (x, 0), 1);
3074
      HOST_WIDE_INT val;
3075
 
3076
      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3077
      gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3078
      val = INTVAL (XEXP (expr, 1));
3079
      if (GET_CODE (expr) == MINUS)
3080
        val = -val;
3081
      gcc_assert (adjust == val || adjust == -val);
3082
      adjust = val;
3083
    }
3084
 
3085
  /* Do not use anti_adjust_stack, since we don't want to update
3086
     stack_pointer_delta.  */
3087
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3088
                              GEN_INT (adjust), stack_pointer_rtx,
3089
                              0, OPTAB_LIB_WIDEN);
3090
  if (temp != stack_pointer_rtx)
3091
    emit_move_insn (stack_pointer_rtx, temp);
3092
 
3093
  switch (code)
3094
    {
3095
    case PRE_INC:
3096
    case PRE_DEC:
3097
    case PRE_MODIFY:
3098
      temp = stack_pointer_rtx;
3099
      break;
3100
    case POST_INC:
3101
    case POST_DEC:
3102
    case POST_MODIFY:
3103
      temp = plus_constant (stack_pointer_rtx, -adjust);
3104
      break;
3105
    default:
3106
      gcc_unreachable ();
3107
    }
3108
 
3109
  return replace_equiv_address (x, temp);
3110
}
3111
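
/* Editor's note: a worked example, not part of the original file,
   assuming GET_MODE_SIZE (DImode) == 8 and an identity PUSH_ROUNDING.
   For X == (mem:DI (pre_dec (reg sp))) the adjustment is negated to -8,
   the stack pointer is explicitly lowered by 8, and the returned MEM is
   (mem:DI (reg sp)).  Had the address been a post_dec, the data belongs
   at the pre-adjustment address, so the result would instead be
   (mem:DI (plus (reg sp) (const_int 8))).  */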
 
3112
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
3113
   X is known to satisfy push_operand, and MODE is known to be complex.
3114
   Returns the last instruction emitted.  */
3115
 
3116
rtx
3117
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3118
{
3119
  enum machine_mode submode = GET_MODE_INNER (mode);
3120
  bool imag_first;
3121
 
3122
#ifdef PUSH_ROUNDING
3123
  unsigned int submodesize = GET_MODE_SIZE (submode);
3124
 
3125
  /* In case we output to the stack, but the size is smaller than the
3126
     machine can push exactly, we need to use move instructions.  */
3127
  if (PUSH_ROUNDING (submodesize) != submodesize)
3128
    {
3129
      x = emit_move_resolve_push (mode, x);
3130
      return emit_move_insn (x, y);
3131
    }
3132
#endif
3133
 
3134
  /* Note that the real part always precedes the imag part in memory
3135
     regardless of machine's endianness.  */
3136
  switch (GET_CODE (XEXP (x, 0)))
3137
    {
3138
    case PRE_DEC:
3139
    case POST_DEC:
3140
      imag_first = true;
3141
      break;
3142
    case PRE_INC:
3143
    case POST_INC:
3144
      imag_first = false;
3145
      break;
3146
    default:
3147
      gcc_unreachable ();
3148
    }
3149
 
3150
  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3151
                  read_complex_part (y, imag_first));
3152
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3153
                         read_complex_part (y, !imag_first));
3154
}
3155
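
/* Editor's note: illustrative only, not part of the original file.  On a
   downward-growing stack (pre_dec/post_dec addresses) an SCmode push is
   done imaginary part first, then real part, so after both 4-byte pushes
   the real part sits at the new stack pointer and the imaginary part at
   sp + 4 -- real before imaginary in memory, as the comment above
   requires.  The 4-byte part size assumes a typical SFmode.  */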
 
3156
/* A subroutine of emit_move_complex.  Perform the move from Y to X
3157
   via two moves of the parts.  Returns the last instruction emitted.  */
3158
 
3159
rtx
3160
emit_move_complex_parts (rtx x, rtx y)
3161
{
3162
  /* Show the output dies here.  This is necessary for SUBREGs
3163
     of pseudos since we cannot track their lifetimes correctly;
3164
     hard regs shouldn't appear here except as return values.  */
3165
  if (!reload_completed && !reload_in_progress
3166
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3167
    emit_clobber (x);
3168
 
3169
  write_complex_part (x, read_complex_part (y, false), false);
3170
  write_complex_part (x, read_complex_part (y, true), true);
3171
 
3172
  return get_last_insn ();
3173
}
3174
 
3175
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3176
   MODE is known to be complex.  Returns the last instruction emitted.  */
3177
 
3178
static rtx
3179
emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3180
{
3181
  bool try_int;
3182
 
3183
  /* Need to take special care for pushes, to maintain proper ordering
3184
     of the data, and possibly extra padding.  */
3185
  if (push_operand (x, mode))
3186
    return emit_move_complex_push (mode, x, y);
3187
 
3188
  /* See if we can coerce the target into moving both values at once.  */
3189
 
3190
  /* Move floating point as parts.  */
3191
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3192
      && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3193
    try_int = false;
3194
  /* Not possible if the values are inherently not adjacent.  */
3195
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3196
    try_int = false;
3197
  /* Is possible if both are registers (or subregs of registers).  */
3198
  else if (register_operand (x, mode) && register_operand (y, mode))
3199
    try_int = true;
3200
  /* If one of the operands is a memory, and alignment constraints
3201
     are friendly enough, we may be able to do combined memory operations.
3202
     We do not attempt this if Y is a constant because that combination is
3203
     usually better with the by-parts thing below.  */
3204
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3205
           && (!STRICT_ALIGNMENT
3206
               || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3207
    try_int = true;
3208
  else
3209
    try_int = false;
3210
 
3211
  if (try_int)
3212
    {
3213
      rtx ret;
3214
 
3215
      /* For memory to memory moves, optimal behavior can be had with the
3216
         existing block move logic.  */
3217
      if (MEM_P (x) && MEM_P (y))
3218
        {
3219
          emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3220
                           BLOCK_OP_NO_LIBCALL);
3221
          return get_last_insn ();
3222
        }
3223
 
3224
      ret = emit_move_via_integer (mode, x, y, true);
3225
      if (ret)
3226
        return ret;
3227
    }
3228
 
3229
  return emit_move_complex_parts (x, y);
3230
}
3231
 
3232
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3233
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */
3234
 
3235
static rtx
3236
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3237
{
3238
  rtx ret;
3239
 
3240
  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
3241
  if (mode != CCmode)
3242
    {
3243
      enum insn_code code = optab_handler (mov_optab, CCmode);
3244
      if (code != CODE_FOR_nothing)
3245
        {
3246
          x = emit_move_change_mode (CCmode, mode, x, true);
3247
          y = emit_move_change_mode (CCmode, mode, y, true);
3248
          return emit_insn (GEN_FCN (code) (x, y));
3249
        }
3250
    }
3251
 
3252
  /* Otherwise, find the MODE_INT mode of the same width.  */
3253
  ret = emit_move_via_integer (mode, x, y, false);
3254
  gcc_assert (ret != NULL);
3255
  return ret;
3256
}
3257
 
3258
/* Return true if word I of OP lies entirely in the
3259
   undefined bits of a paradoxical subreg.  */
3260
 
3261
static bool
3262
undefined_operand_subword_p (const_rtx op, int i)
3263
{
3264
  enum machine_mode innermode, innermostmode;
3265
  int offset;
3266
  if (GET_CODE (op) != SUBREG)
3267
    return false;
3268
  innermode = GET_MODE (op);
3269
  innermostmode = GET_MODE (SUBREG_REG (op));
3270
  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3271
  /* The SUBREG_BYTE represents the offset, as if the value were stored in
3272
     memory, except for a paradoxical subreg where we define
3273
     SUBREG_BYTE to be 0; undo this exception as in
3274
     simplify_subreg.  */
3275
  if (SUBREG_BYTE (op) == 0
3276
      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3277
    {
3278
      int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3279
      if (WORDS_BIG_ENDIAN)
3280
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3281
      if (BYTES_BIG_ENDIAN)
3282
        offset += difference % UNITS_PER_WORD;
3283
    }
3284
  if (offset >= GET_MODE_SIZE (innermostmode)
3285
      || offset <= -GET_MODE_SIZE (word_mode))
3286
    return true;
3287
  return false;
3288
}
3289
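
/* Editor's note: a worked example, not part of the original file,
   assuming 4-byte words and little-endian byte and word order.  For the
   paradoxical (subreg:DI (reg:SI 100) 0), word 0 overlaps the SImode
   value (offset 0 < 4), but word 1 starts at byte offset 4, which is
   past GET_MODE_SIZE (SImode), so undefined_operand_subword_p returns
   true for I == 1 and emit_move_multi_word can skip that word move.  */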
 
3290
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3291
   MODE is any multi-word or full-word mode that lacks a move_insn
3292
   pattern.  Note that you will get better code if you define such
3293
   patterns, even if they must turn into multiple assembler instructions.  */
3294
 
3295
static rtx
3296
emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3297
{
3298
  rtx last_insn = 0;
3299
  rtx seq, inner;
3300
  bool need_clobber;
3301
  int i;
3302
 
3303
  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3304
 
3305
  /* If X is a push on the stack, do the push now and replace
3306
     X with a reference to the stack pointer.  */
3307
  if (push_operand (x, mode))
3308
    x = emit_move_resolve_push (mode, x);
3309
 
3310
  /* If we are in reload, see if either operand is a MEM whose address
3311
     is scheduled for replacement.  */
3312
  if (reload_in_progress && MEM_P (x)
3313
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3314
    x = replace_equiv_address_nv (x, inner);
3315
  if (reload_in_progress && MEM_P (y)
3316
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3317
    y = replace_equiv_address_nv (y, inner);
3318
 
3319
  start_sequence ();
3320
 
3321
  need_clobber = false;
3322
  for (i = 0;
3323
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3324
       i++)
3325
    {
3326
      rtx xpart = operand_subword (x, i, 1, mode);
3327
      rtx ypart;
3328
 
3329
      /* Do not generate code for a move if it would come entirely
3330
         from the undefined bits of a paradoxical subreg.  */
3331
      if (undefined_operand_subword_p (y, i))
3332
        continue;
3333
 
3334
      ypart = operand_subword (y, i, 1, mode);
3335
 
3336
      /* If we can't get a part of Y, put Y into memory if it is a
3337
         constant.  Otherwise, force it into a register.  Then we must
3338
         be able to get a part of Y.  */
3339
      if (ypart == 0 && CONSTANT_P (y))
3340
        {
3341
          y = use_anchored_address (force_const_mem (mode, y));
3342
          ypart = operand_subword (y, i, 1, mode);
3343
        }
3344
      else if (ypart == 0)
3345
        ypart = operand_subword_force (y, i, mode);
3346
 
3347
      gcc_assert (xpart && ypart);
3348
 
3349
      need_clobber |= (GET_CODE (xpart) == SUBREG);
3350
 
3351
      last_insn = emit_move_insn (xpart, ypart);
3352
    }
3353
 
3354
  seq = get_insns ();
3355
  end_sequence ();
3356
 
3357
  /* Show the output dies here.  This is necessary for SUBREGs
3358
     of pseudos since we cannot track their lifetimes correctly;
3359
     hard regs shouldn't appear here except as return values.
3360
     We never want to emit such a clobber after reload.  */
3361
  if (x != y
3362
      && ! (reload_in_progress || reload_completed)
3363
      && need_clobber != 0)
3364
    emit_clobber (x);
3365
 
3366
  emit_insn (seq);
3367
 
3368
  return last_insn;
3369
}
3370
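
/* Editor's note: an illustrative sketch, not part of the original file.
   On a 32-bit target with no movdi pattern, a DImode copy reaching this
   function is split into two word moves built with operand_subword:

     emit_move_insn (operand_subword (x, 0, 1, DImode),
                     operand_subword (y, 0, 1, DImode));
     /+ and likewise for word 1 +/

   with a constant Y first spilled to the constant pool if a subword of
   it cannot be taken directly, exactly as the loop above does.  */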
 
3371
/* Low level part of emit_move_insn.
3372
   Called just like emit_move_insn, but assumes X and Y
3373
   are basically valid.  */
3374
 
3375
rtx
3376
emit_move_insn_1 (rtx x, rtx y)
3377
{
3378
  enum machine_mode mode = GET_MODE (x);
3379
  enum insn_code code;
3380
 
3381
  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3382
 
3383
  code = optab_handler (mov_optab, mode);
3384
  if (code != CODE_FOR_nothing)
3385
    return emit_insn (GEN_FCN (code) (x, y));
3386
 
3387
  /* Expand complex moves by moving real part and imag part.  */
3388
  if (COMPLEX_MODE_P (mode))
3389
    return emit_move_complex (mode, x, y);
3390
 
3391
  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3392
      || ALL_FIXED_POINT_MODE_P (mode))
3393
    {
3394
      rtx result = emit_move_via_integer (mode, x, y, true);
3395
 
3396
      /* If we can't find an integer mode, use multi words.  */
3397
      if (result)
3398
        return result;
3399
      else
3400
        return emit_move_multi_word (mode, x, y);
3401
    }
3402
 
3403
  if (GET_MODE_CLASS (mode) == MODE_CC)
3404
    return emit_move_ccmode (mode, x, y);
3405
 
3406
  /* Try using a move pattern for the corresponding integer mode.  This is
3407
     only safe when simplify_subreg can convert MODE constants into integer
3408
     constants.  At present, it can only do this reliably if the value
3409
     fits within a HOST_WIDE_INT.  */
3410
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3411
    {
3412
      rtx ret = emit_move_via_integer (mode, x, y, false);
3413
      if (ret)
3414
        return ret;
3415
    }
3416
 
3417
  return emit_move_multi_word (mode, x, y);
3418
}
3419
 
3420
/* Generate code to copy Y into X.
3421
   Both Y and X must have the same mode, except that
3422
   Y can be a constant with VOIDmode.
3423
   This mode cannot be BLKmode; use emit_block_move for that.
3424
 
3425
   Return the last instruction emitted.  */
3426
 
3427
rtx
3428
emit_move_insn (rtx x, rtx y)
3429
{
3430
  enum machine_mode mode = GET_MODE (x);
3431
  rtx y_cst = NULL_RTX;
3432
  rtx last_insn, set;
3433
 
3434
  gcc_assert (mode != BLKmode
3435
              && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3436
 
3437
  if (CONSTANT_P (y))
3438
    {
3439
      if (optimize
3440
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3441
          && (last_insn = compress_float_constant (x, y)))
3442
        return last_insn;
3443
 
3444
      y_cst = y;
3445
 
3446
      if (!targetm.legitimate_constant_p (mode, y))
3447
        {
3448
          y = force_const_mem (mode, y);
3449
 
3450
          /* If the target's cannot_force_const_mem prevented the spill,
3451
             assume that the target's move expanders will also take care
3452
             of the non-legitimate constant.  */
3453
          if (!y)
3454
            y = y_cst;
3455
          else
3456
            y = use_anchored_address (y);
3457
        }
3458
    }
3459
 
3460
  /* If X or Y are memory references, verify that their addresses are valid
3461
     for the machine.  */
3462
  if (MEM_P (x)
3463
      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3464
                                         MEM_ADDR_SPACE (x))
3465
          && ! push_operand (x, GET_MODE (x))))
3466
    x = validize_mem (x);
3467
 
3468
  if (MEM_P (y)
3469
      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3470
                                        MEM_ADDR_SPACE (y)))
3471
    y = validize_mem (y);
3472
 
3473
  gcc_assert (mode != BLKmode);
3474
 
3475
  last_insn = emit_move_insn_1 (x, y);
3476
 
3477
  if (y_cst && REG_P (x)
3478
      && (set = single_set (last_insn)) != NULL_RTX
3479
      && SET_DEST (set) == x
3480
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
3481
    set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3482
 
3483
  return last_insn;
3484
}
3485
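
/* Editor's note: a minimal usage sketch, not part of the original file.
   Copying a constant into a fresh pseudo from expander code looks like

     rtx reg = gen_reg_rtx (SImode);
     rtx insn = emit_move_insn (reg, GEN_INT (42));

   The destination fixes the mode; a CONST_INT source is VOIDmode, which
   the assertion above permits.  If the constant were not legitimate for
   the target it would be forced into the constant pool first, and the
   REG_EQUAL note added at the end would still record the original
   value.  */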
 
3486
/* If Y is representable exactly in a narrower mode, and the target can
3487
   perform the extension directly from constant or memory, then emit the
3488
   move as an extension.  */
3489
 
3490
static rtx
3491
compress_float_constant (rtx x, rtx y)
3492
{
3493
  enum machine_mode dstmode = GET_MODE (x);
3494
  enum machine_mode orig_srcmode = GET_MODE (y);
3495
  enum machine_mode srcmode;
3496
  REAL_VALUE_TYPE r;
3497
  int oldcost, newcost;
3498
  bool speed = optimize_insn_for_speed_p ();
3499
 
3500
  REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3501
 
3502
  if (targetm.legitimate_constant_p (dstmode, y))
3503
    oldcost = set_src_cost (y, speed);
3504
  else
3505
    oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3506
 
3507
  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3508
       srcmode != orig_srcmode;
3509
       srcmode = GET_MODE_WIDER_MODE (srcmode))
3510
    {
3511
      enum insn_code ic;
3512
      rtx trunc_y, last_insn;
3513
 
3514
      /* Skip if the target can't extend this way.  */
3515
      ic = can_extend_p (dstmode, srcmode, 0);
3516
      if (ic == CODE_FOR_nothing)
3517
        continue;
3518
 
3519
      /* Skip if the narrowed value isn't exact.  */
3520
      if (! exact_real_truncate (srcmode, &r))
3521
        continue;
3522
 
3523
      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3524
 
3525
      if (targetm.legitimate_constant_p (srcmode, trunc_y))
3526
        {
3527
          /* Skip if the target needs extra instructions to perform
3528
             the extension.  */
3529
          if (!insn_operand_matches (ic, 1, trunc_y))
3530
            continue;
3531
          /* This is valid, but may not be cheaper than the original. */
3532
          newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3533
                                  speed);
3534
          if (oldcost < newcost)
3535
            continue;
3536
        }
3537
      else if (float_extend_from_mem[dstmode][srcmode])
3538
        {
3539
          trunc_y = force_const_mem (srcmode, trunc_y);
3540
          /* This is valid, but may not be cheaper than the original. */
3541
          newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3542
                                  speed);
3543
          if (oldcost < newcost)
3544
            continue;
3545
          trunc_y = validize_mem (trunc_y);
3546
        }
3547
      else
3548
        continue;
3549
 
3550
      /* For CSE's benefit, force the compressed constant pool entry
3551
         into a new pseudo.  This constant may be used in different modes,
3552
         and if not, combine will put things back together for us.  */
3553
      trunc_y = force_reg (srcmode, trunc_y);
3554
      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3555
      last_insn = get_last_insn ();
3556
 
3557
      if (REG_P (x))
3558
        set_unique_reg_note (last_insn, REG_EQUAL, y);
3559
 
3560
      return last_insn;
3561
    }
3562
 
3563
  return NULL_RTX;
3564
}
3565
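
/* Editor's note: illustrative only, not part of the original file.
   Suppose X is a DFmode register and Y the DFmode constant 1.0 on a
   target where DFmode constants must live in the constant pool.  1.0
   truncates exactly to SFmode, so if the target can float-extend SFmode
   to DFmode (and the extension is no more expensive than the plain
   load), the move is emitted as an SFmode constant-pool load followed
   by that extension, halving the size of the pool entry.  */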
 
3566
/* Pushing data onto the stack.  */
3567
 
3568
/* Push a block of length SIZE (perhaps variable)
3569
   and return an rtx to address the beginning of the block.
3570
   The value may be virtual_outgoing_args_rtx.
3571
 
3572
   EXTRA is the number of bytes of padding to push in addition to SIZE.
3573
   BELOW nonzero means this padding comes at low addresses;
3574
   otherwise, the padding comes at high addresses.  */
3575
 
3576
rtx
3577
push_block (rtx size, int extra, int below)
3578
{
3579
  rtx temp;
3580
 
3581
  size = convert_modes (Pmode, ptr_mode, size, 1);
3582
  if (CONSTANT_P (size))
3583
    anti_adjust_stack (plus_constant (size, extra));
3584
  else if (REG_P (size) && extra == 0)
3585
    anti_adjust_stack (size);
3586
  else
3587
    {
3588
      temp = copy_to_mode_reg (Pmode, size);
3589
      if (extra != 0)
3590
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3591
                             temp, 0, OPTAB_LIB_WIDEN);
3592
      anti_adjust_stack (temp);
3593
    }
3594
 
3595
#ifndef STACK_GROWS_DOWNWARD
3596
  if (0)
3597
#else
3598
  if (1)
3599
#endif
3600
    {
3601
      temp = virtual_outgoing_args_rtx;
3602
      if (extra != 0 && below)
3603
        temp = plus_constant (temp, extra);
3604
    }
3605
  else
3606
    {
3607
      if (CONST_INT_P (size))
3608
        temp = plus_constant (virtual_outgoing_args_rtx,
3609
                              -INTVAL (size) - (below ? 0 : extra));
3610
      else if (extra != 0 && !below)
3611
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3612
                             negate_rtx (Pmode, plus_constant (size, extra)));
3613
      else
3614
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3615
                             negate_rtx (Pmode, size));
3616
    }
3617
 
3618
  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3619
}
3620
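
/* Editor's note: a worked example, not part of the original file,
   assuming a downward-growing stack.  push_block (GEN_INT (16), 0, 0)
   emits an anti_adjust_stack of 16 bytes and returns an address based
   on virtual_outgoing_args_rtx, i.e. the bottom of the freshly
   allocated 16-byte block, ready to be wrapped in a BLKmode MEM by the
   caller.  */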
 
3621
/* A utility routine that returns the base of an auto-inc memory, or NULL.  */
3622
 
3623
static rtx
3624
mem_autoinc_base (rtx mem)
3625
{
3626
  if (MEM_P (mem))
3627
    {
3628
      rtx addr = XEXP (mem, 0);
3629
      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3630
        return XEXP (addr, 0);
3631
    }
3632
  return NULL;
3633
}
3634
 
3635
/* A utility routine used here, in reload, and in try_split.  The insns
3636
   after PREV up to and including LAST are known to adjust the stack,
3637
   with a final value of END_ARGS_SIZE.  Iterate backward from LAST
3638
   placing notes as appropriate.  PREV may be NULL, indicating the
3639
   entire insn sequence prior to LAST should be scanned.
3640
 
3641
   The set of allowed stack pointer modifications is small:
3642
     (1) One or more auto-inc style memory references (aka pushes),
3643
     (2) One or more addition/subtraction with the SP as destination,
3644
     (3) A single move insn with the SP as destination,
3645
     (4) A call_pop insn,
3646
     (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3647
 
3648
   Insns in the sequence that do not modify the SP are ignored,
3649
   except for noreturn calls.
3650
 
3651
   The return value is the amount of adjustment that can be trivially
3652
   verified, via immediate operand or auto-inc.  If the adjustment
3653
   cannot be trivially extracted, the return value is INT_MIN.  */
3654
 
3655
HOST_WIDE_INT
3656
find_args_size_adjust (rtx insn)
3657
{
3658
  rtx dest, set, pat;
3659
  int i;
3660
 
3661
  pat = PATTERN (insn);
3662
  set = NULL;
3663
 
3664
  /* Look for a call_pop pattern.  */
3665
  if (CALL_P (insn))
3666
    {
3667
      /* We have to allow non-call_pop patterns for the case
3668
         of emit_single_push_insn of a TLS address.  */
3669
      if (GET_CODE (pat) != PARALLEL)
3670
        return 0;
3671
 
3672
      /* All call_pop have a stack pointer adjust in the parallel.
3673
         The call itself is always first, and the stack adjust is
3674
         usually last, so search from the end.  */
3675
      for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3676
        {
3677
          set = XVECEXP (pat, 0, i);
3678
          if (GET_CODE (set) != SET)
3679
            continue;
3680
          dest = SET_DEST (set);
3681
          if (dest == stack_pointer_rtx)
3682
            break;
3683
        }
3684
      /* We'd better have found the stack pointer adjust.  */
3685
      if (i == 0)
3686
        return 0;
3687
      /* Fall through to process the extracted SET and DEST
3688
         as if it was a standalone insn.  */
3689
    }
3690
  else if (GET_CODE (pat) == SET)
3691
    set = pat;
3692
  else if ((set = single_set (insn)) != NULL)
3693
    ;
3694
  else if (GET_CODE (pat) == PARALLEL)
3695
    {
3696
      /* ??? Some older ports use a parallel with a stack adjust
3697
         and a store for a PUSH_ROUNDING pattern, rather than a
3698
         PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
3699
      /* ??? See h8300 and m68k, pushqi1.  */
3700
      for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3701
        {
3702
          set = XVECEXP (pat, 0, i);
3703
          if (GET_CODE (set) != SET)
3704
            continue;
3705
          dest = SET_DEST (set);
3706
          if (dest == stack_pointer_rtx)
3707
            break;
3708
 
3709
          /* We do not expect an auto-inc of the sp in the parallel.  */
3710
          gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3711
          gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3712
                               != stack_pointer_rtx);
3713
        }
3714
      if (i < 0)
3715
        return 0;
3716
    }
3717
  else
3718
    return 0;
3719
 
3720
  dest = SET_DEST (set);
3721
 
3722
  /* Look for direct modifications of the stack pointer.  */
3723
  if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3724
    {
3725
      /* Look for a trivial adjustment, otherwise assume nothing.  */
3726
      /* Note that the SPU restore_stack_block pattern refers to
3727
         the stack pointer in V4SImode.  Consider that non-trivial.  */
3728
      if (SCALAR_INT_MODE_P (GET_MODE (dest))
3729
          && GET_CODE (SET_SRC (set)) == PLUS
3730
          && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3731
          && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3732
        return INTVAL (XEXP (SET_SRC (set), 1));
3733
      /* ??? Reload can generate no-op moves, which will be cleaned
3734
         up later.  Recognize it and continue searching.  */
3735
      else if (rtx_equal_p (dest, SET_SRC (set)))
3736
        return 0;
3737
      else
3738
        return HOST_WIDE_INT_MIN;
3739
    }
3740
  else
3741
    {
3742
      rtx mem, addr;
3743
 
3744
      /* Otherwise only think about autoinc patterns.  */
3745
      if (mem_autoinc_base (dest) == stack_pointer_rtx)
3746
        {
3747
          mem = dest;
3748
          gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3749
                               != stack_pointer_rtx);
3750
        }
3751
      else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3752
        mem = SET_SRC (set);
3753
      else
3754
        return 0;
3755
 
3756
      addr = XEXP (mem, 0);
3757
      switch (GET_CODE (addr))
3758
        {
3759
        case PRE_INC:
3760
        case POST_INC:
3761
          return GET_MODE_SIZE (GET_MODE (mem));
3762
        case PRE_DEC:
3763
        case POST_DEC:
3764
          return -GET_MODE_SIZE (GET_MODE (mem));
3765
        case PRE_MODIFY:
3766
        case POST_MODIFY:
3767
          addr = XEXP (addr, 1);
3768
          gcc_assert (GET_CODE (addr) == PLUS);
3769
          gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3770
          gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3771
          return INTVAL (XEXP (addr, 1));
3772
        default:
3773
          gcc_unreachable ();
3774
        }
3775
    }
3776
}
3777
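
/* Editor's note: illustrative examples, not part of the original file.
   For (set (reg sp) (plus (reg sp) (const_int -16))) the function above
   returns -16; for a push such as (set (mem:SI (pre_dec (reg sp))) ...)
   it returns -GET_MODE_SIZE (SImode); and for a stack pointer assignment
   whose adjustment is not a visible constant it returns
   HOST_WIDE_INT_MIN, telling the caller the amount cannot be verified
   trivially.  */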
 
3778
int
3779
fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3780
{
3781
  int args_size = end_args_size;
3782
  bool saw_unknown = false;
3783
  rtx insn;
3784
 
3785
  for (insn = last; insn != prev; insn = PREV_INSN (insn))
3786
    {
3787
      HOST_WIDE_INT this_delta;
3788
 
3789
      if (!NONDEBUG_INSN_P (insn))
3790
        continue;
3791
 
3792
      this_delta = find_args_size_adjust (insn);
3793
      if (this_delta == 0)
3794
        {
3795
          if (!CALL_P (insn)
3796
              || ACCUMULATE_OUTGOING_ARGS
3797
              || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3798
            continue;
3799
        }
3800
 
3801
      gcc_assert (!saw_unknown);
3802
      if (this_delta == HOST_WIDE_INT_MIN)
3803
        saw_unknown = true;
3804
 
3805
      add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3806
#ifdef STACK_GROWS_DOWNWARD
3807
      this_delta = -this_delta;
3808
#endif
3809
      args_size -= this_delta;
3810
    }
3811
 
3812
  return saw_unknown ? INT_MIN : args_size;
3813
}
3814
 
3815
#ifdef PUSH_ROUNDING
3816
/* Emit single push insn.  */
3817
 
3818
static void
3819
emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3820
{
3821
  rtx dest_addr;
3822
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3823
  rtx dest;
3824
  enum insn_code icode;
3825
 
3826
  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3827
  /* If there is a push pattern, use it.  Otherwise try the old way of
3828
     throwing a MEM representing the push operation at the move expander.  */
3829
  icode = optab_handler (push_optab, mode);
3830
  if (icode != CODE_FOR_nothing)
3831
    {
3832
      struct expand_operand ops[1];
3833
 
3834
      create_input_operand (&ops[0], x, mode);
3835
      if (maybe_expand_insn (icode, 1, ops))
3836
        return;
3837
    }
3838
  if (GET_MODE_SIZE (mode) == rounded_size)
3839
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3840
  /* If we are to pad downward, adjust the stack pointer first and
3841
     then store X into the stack location using an offset.  This is
3842
     because emit_move_insn does not know how to pad; it does not have
3843
     access to type.  */
3844
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3845
    {
3846
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3847
      HOST_WIDE_INT offset;
3848
 
3849
      emit_move_insn (stack_pointer_rtx,
3850
                      expand_binop (Pmode,
3851
#ifdef STACK_GROWS_DOWNWARD
3852
                                    sub_optab,
3853
#else
3854
                                    add_optab,
3855
#endif
3856
                                    stack_pointer_rtx,
3857
                                    GEN_INT (rounded_size),
3858
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN));
3859
 
3860
      offset = (HOST_WIDE_INT) padding_size;
3861
#ifdef STACK_GROWS_DOWNWARD
3862
      if (STACK_PUSH_CODE == POST_DEC)
3863
        /* We have already decremented the stack pointer, so get the
3864
           previous value.  */
3865
        offset += (HOST_WIDE_INT) rounded_size;
3866
#else
3867
      if (STACK_PUSH_CODE == POST_INC)
3868
        /* We have already incremented the stack pointer, so get the
3869
           previous value.  */
3870
        offset -= (HOST_WIDE_INT) rounded_size;
3871
#endif
3872
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3873
    }
3874
  else
3875
    {
3876
#ifdef STACK_GROWS_DOWNWARD
3877
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
3878
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3879
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
3880
#else
3881
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
3882
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3883
                                GEN_INT (rounded_size));
3884
#endif
3885
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3886
    }
3887
 
3888
  dest = gen_rtx_MEM (mode, dest_addr);
3889
 
3890
  if (type != 0)
3891
    {
3892
      set_mem_attributes (dest, type, 1);
3893
 
3894
      if (flag_optimize_sibling_calls)
3895
        /* Function incoming arguments may overlap with sibling call
3896
           outgoing arguments and we cannot allow reordering of reads
3897
           from function arguments with stores to outgoing arguments
3898
           of sibling calls.  */
3899
        set_mem_alias_set (dest, 0);
3900
    }
3901
  emit_move_insn (dest, x);
3902
}
3903
 
3904
/* Emit and annotate a single push insn.  */
3905
 
3906
static void
3907
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3908
{
3909
  int delta, old_delta = stack_pointer_delta;
3910
  rtx prev = get_last_insn ();
3911
  rtx last;
3912
 
3913
  emit_single_push_insn_1 (mode, x, type);
3914
 
3915
  last = get_last_insn ();
3916
 
3917
  /* Notice the common case where we emitted exactly one insn.  */
3918
  if (PREV_INSN (last) == prev)
3919
    {
3920
      add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
3921
      return;
3922
    }
3923
 
3924
  delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
3925
  gcc_assert (delta == INT_MIN || delta == old_delta);
3926
}
3927
#endif
3928
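
/* Editor's note: a worked example, not part of the original file,
   assuming a downward-growing stack, a PUSH_ROUNDING that rounds 2 bytes
   up to 4, and downward argument padding.  Pushing an HImode value then
   takes the padding branch of emit_single_push_insn_1: the stack pointer
   is lowered by the rounded size (4), padding_size is 4 - 2 = 2, and the
   value is stored at sp + 2 (sp + 6 if STACK_PUSH_CODE is POST_DEC,
   since sp has already moved), so the padding ends up below the data as
   requested.  */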
 
3929
/* Generate code to push X onto the stack, assuming it has mode MODE and
3930
   type TYPE.
3931
   MODE is redundant except when X is a CONST_INT (since they don't
3932
   carry mode info).
3933
   SIZE is an rtx for the size of data to be copied (in bytes),
3934
   needed only if X is BLKmode.
3935
 
3936
   ALIGN (in bits) is maximum alignment we can assume.
3937
 
3938
   If PARTIAL and REG are both nonzero, then copy that many of the first
3939
   bytes of X into registers starting with REG, and push the rest of X.
3940
   The amount of space pushed is decreased by PARTIAL bytes.
3941
   REG must be a hard register in this case.
3942
   If REG is zero but PARTIAL is not, take all other actions for an
3943
   argument partially in registers, but do not actually load any
3944
   registers.
3945
 
3946
   EXTRA is the amount in bytes of extra space to leave next to this arg.
3947
   This is ignored if an argument block has already been allocated.
3948
 
3949
   On a machine that lacks real push insns, ARGS_ADDR is the address of
3950
   the bottom of the argument block for this call.  We use indexing off there
3951
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3952
   argument block has not been preallocated.
3953
 
3954
   ARGS_SO_FAR is the size of args previously pushed for this call.
3955
 
3956
   REG_PARM_STACK_SPACE is nonzero if functions require stack space
3957
   for arguments passed in registers.  If nonzero, it will be the number
3958
   of bytes required.  */
3959
 
3960
void
3961
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3962
                unsigned int align, int partial, rtx reg, int extra,
3963
                rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3964
                rtx alignment_pad)
3965
{
3966
  rtx xinner;
3967
  enum direction stack_direction
3968
#ifdef STACK_GROWS_DOWNWARD
3969
    = downward;
3970
#else
3971
    = upward;
3972
#endif
3973
 
3974
  /* Decide where to pad the argument: `downward' for below,
3975
     `upward' for above, or `none' for don't pad it.
3976
     Default is below for small data on big-endian machines; else above.  */
3977
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3978
 
3979
  /* Invert direction if stack is post-decrement.
3980
     FIXME: why?  */
3981
  if (STACK_PUSH_CODE == POST_DEC)
3982
    if (where_pad != none)
3983
      where_pad = (where_pad == downward ? upward : downward);
3984
 
3985
  xinner = x;
3986
 
3987
  if (mode == BLKmode
3988
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3989
    {
3990
      /* Copy a block into the stack, entirely or partially.  */
3991
 
3992
      rtx temp;
3993
      int used;
3994
      int offset;
3995
      int skip;
3996
 
3997
      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3998
      used = partial - offset;
3999
 
4000
      if (mode != BLKmode)
4001
        {
4002
          /* A value is to be stored in an insufficiently aligned
4003
             stack slot; copy via a suitably aligned slot if
4004
             necessary.  */
4005
          size = GEN_INT (GET_MODE_SIZE (mode));
4006
          if (!MEM_P (xinner))
4007
            {
4008
              temp = assign_temp (type, 0, 1, 1);
4009
              emit_move_insn (temp, xinner);
4010
              xinner = temp;
4011
            }
4012
        }
4013
 
4014
      gcc_assert (size);
4015
 
4016
      /* USED is now the # of bytes we need not copy to the stack
4017
         because registers will take care of them.  */
4018
 
4019
      if (partial != 0)
4020
        xinner = adjust_address (xinner, BLKmode, used);
4021
 
4022
      /* If the partial register-part of the arg counts in its stack size,
4023
         skip the part of stack space corresponding to the registers.
4024
         Otherwise, start copying to the beginning of the stack space,
4025
         by setting SKIP to 0.  */
4026
      skip = (reg_parm_stack_space == 0) ? 0 : used;
4027
 
4028
#ifdef PUSH_ROUNDING
4029
      /* Do it with several push insns if that doesn't take lots of insns
4030
         and if there is no difficulty with push insns that skip bytes
4031
         on the stack for alignment purposes.  */
4032
      if (args_addr == 0
4033
          && PUSH_ARGS
4034
          && CONST_INT_P (size)
4035
          && skip == 0
4036
          && MEM_ALIGN (xinner) >= align
4037
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4038
          /* Here we avoid the case of a structure whose weak alignment
4039
             forces many pushes of a small amount of data,
4040
             and such small pushes do rounding that causes trouble.  */
4041
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4042
              || align >= BIGGEST_ALIGNMENT
4043
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4044
                  == (align / BITS_PER_UNIT)))
4045
          && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4046
        {
4047
          /* Push padding now if padding above and stack grows down,
4048
             or if padding below and stack grows up.
4049
             But if space already allocated, this has already been done.  */
4050
          if (extra && args_addr == 0
4051
              && where_pad != none && where_pad != stack_direction)
4052
            anti_adjust_stack (GEN_INT (extra));
4053
 
4054
          move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4055
        }
4056
      else
4057
#endif /* PUSH_ROUNDING  */
4058
        {
4059
          rtx target;
4060
 
4061
          /* Otherwise make space on the stack and copy the data
4062
             to the address of that space.  */
4063
 
4064
          /* Deduct words put into registers from the size we must copy.  */
4065
          if (partial != 0)
4066
            {
4067
              if (CONST_INT_P (size))
4068
                size = GEN_INT (INTVAL (size) - used);
4069
              else
4070
                size = expand_binop (GET_MODE (size), sub_optab, size,
4071
                                     GEN_INT (used), NULL_RTX, 0,
4072
                                     OPTAB_LIB_WIDEN);
4073
            }
4074
 
4075
          /* Get the address of the stack space.
4076
             In this case, we do not deal with EXTRA separately.
4077
             A single stack adjust will do.  */
4078
          if (! args_addr)
4079
            {
4080
              temp = push_block (size, extra, where_pad == downward);
4081
              extra = 0;
4082
            }
4083
          else if (CONST_INT_P (args_so_far))
4084
            temp = memory_address (BLKmode,
4085
                                   plus_constant (args_addr,
4086
                                                  skip + INTVAL (args_so_far)));
4087
          else
4088
            temp = memory_address (BLKmode,
4089
                                   plus_constant (gen_rtx_PLUS (Pmode,
4090
                                                                args_addr,
4091
                                                                args_so_far),
4092
                                                  skip));
4093
 
4094
          if (!ACCUMULATE_OUTGOING_ARGS)
4095
            {
4096
              /* If the source is referenced relative to the stack pointer,
4097
                 copy it to another register to stabilize it.  We do not need
4098
                 to do this if we know that we won't be changing sp.  */
4099
 
4100
              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4101
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4102
                temp = copy_to_reg (temp);
4103
            }
4104
 
4105
          target = gen_rtx_MEM (BLKmode, temp);
4106
 
4107
          /* We do *not* set_mem_attributes here, because incoming arguments
4108
             may overlap with sibling call outgoing arguments and we cannot
4109
             allow reordering of reads from function arguments with stores
4110
             to outgoing arguments of sibling calls.  We do, however, want
4111
             to record the alignment of the stack slot.  */
4112
          /* ALIGN may well be better aligned than TYPE, e.g. due to
4113
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
4114
          set_mem_align (target, align);
4115
 
4116
          emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4117
        }
4118
    }
4119
  else if (partial > 0)
4120
    {
4121
      /* Scalar partly in registers.  */
4122
 
4123
      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4124
      int i;
4125
      int not_stack;
4126
      /* # bytes at the start of the argument
4127
         that we must make space for but need not store.  */
4128
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4129
      int args_offset = INTVAL (args_so_far);
4130
      int skip;
4131
 
4132
      /* Push padding now if padding above and stack grows down,
4133
         or if padding below and stack grows up.
4134
         But if space already allocated, this has already been done.  */
4135
      if (extra && args_addr == 0
4136
          && where_pad != none && where_pad != stack_direction)
4137
        anti_adjust_stack (GEN_INT (extra));
4138
 
4139
      /* If we make space by pushing it, we might as well push
4140
         the real data.  Otherwise, we can leave OFFSET nonzero
4141
         and leave the space uninitialized.  */
4142
      if (args_addr == 0)
4143
        offset = 0;
4144
 
4145
      /* Now NOT_STACK gets the number of words that we don't need to
4146
         allocate on the stack.  Convert OFFSET to words too.  */
4147
      not_stack = (partial - offset) / UNITS_PER_WORD;
4148
      offset /= UNITS_PER_WORD;
4149
 
4150
      /* If the partial register-part of the arg counts in its stack size,
4151
         skip the part of stack space corresponding to the registers.
4152
         Otherwise, start copying to the beginning of the stack space,
4153
         by setting SKIP to 0.  */
4154
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4155
 
4156
      if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4157
        x = validize_mem (force_const_mem (mode, x));
4158
 
4159
      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4160
         SUBREGs of such registers are not allowed.  */
4161
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4162
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4163
        x = copy_to_reg (x);
4164
 
4165
      /* Loop over all the words allocated on the stack for this arg.  */
4166
      /* We can do it by words, because any scalar bigger than a word
4167
         has a size that is a multiple of a word.  */
4168
#ifndef PUSH_ARGS_REVERSED
4169
      for (i = not_stack; i < size; i++)
4170
#else
4171
      for (i = size - 1; i >= not_stack; i--)
4172
#endif
4173
        if (i >= not_stack + offset)
4174
          emit_push_insn (operand_subword_force (x, i, mode),
4175
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4176
                          0, args_addr,
4177
                          GEN_INT (args_offset + ((i - not_stack + skip)
4178
                                                  * UNITS_PER_WORD)),
4179
                          reg_parm_stack_space, alignment_pad);
4180
    }
4181
  else
4182
    {
4183
      rtx addr;
4184
      rtx dest;
4185
 
4186
      /* Push padding now if padding above and stack grows down,
4187
         or if padding below and stack grows up.
4188
         But if space already allocated, this has already been done.  */
4189
      if (extra && args_addr == 0
4190
          && where_pad != none && where_pad != stack_direction)
4191
        anti_adjust_stack (GEN_INT (extra));
4192
 
4193
#ifdef PUSH_ROUNDING
4194
      if (args_addr == 0 && PUSH_ARGS)
4195
        emit_single_push_insn (mode, x, type);
4196
      else
4197
#endif
4198
        {
4199
          if (CONST_INT_P (args_so_far))
4200
            addr
4201
              = memory_address (mode,
4202
                                plus_constant (args_addr,
4203
                                               INTVAL (args_so_far)));
4204
          else
4205
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4206
                                                       args_so_far));
4207
          dest = gen_rtx_MEM (mode, addr);
4208
 
4209
          /* We do *not* set_mem_attributes here, because incoming arguments
4210
             may overlap with sibling call outgoing arguments and we cannot
4211
             allow reordering of reads from function arguments with stores
4212
             to outgoing arguments of sibling calls.  We do, however, want
4213
             to record the alignment of the stack slot.  */
4214
          /* ALIGN may well be better aligned than TYPE, e.g. due to
4215
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
4216
          set_mem_align (dest, align);
4217
 
4218
          emit_move_insn (dest, x);
4219
        }
4220
    }
4221
 
4222
  /* If part should go in registers, copy that part
4223
     into the appropriate registers.  Do this now, at the end,
4224
     since mem-to-mem copies above may do function calls.  */
4225
  if (partial > 0 && reg != 0)
4226
    {
4227
      /* Handle calls that pass values in multiple non-contiguous locations.
4228
         The Irix 6 ABI has examples of this.  */
4229
      if (GET_CODE (reg) == PARALLEL)
4230
        emit_group_load (reg, x, type, -1);
4231
      else
4232
        {
4233
          gcc_assert (partial % UNITS_PER_WORD == 0);
4234
          move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4235
        }
4236
    }
4237
 
4238
  if (extra && args_addr == 0 && where_pad == stack_direction)
4239
    anti_adjust_stack (GEN_INT (extra));
4240
 
4241
  if (alignment_pad && args_addr == 0)
4242
    anti_adjust_stack (alignment_pad);
4243
}
4244
 
4245
/* Return X if X can be used as a subtarget in a sequence of arithmetic
4246
   operations.  */
4247
 
4248
static rtx
4249
get_subtarget (rtx x)
4250
{
4251
  return (optimize
4252
          || x == 0
4253
           /* Only registers can be subtargets.  */
4254
           || !REG_P (x)
4255
           /* Don't use hard regs to avoid extending their life.  */
4256
           || REGNO (x) < FIRST_PSEUDO_REGISTER
4257
          ? 0 : x);
4258
}
4259
 
4260
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
4261
   FIELD is a bitfield.  Returns true if the optimization was successful,
4262
   and there's nothing else to do.  */
4263
 
4264
static bool
4265
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4266
                                 unsigned HOST_WIDE_INT bitpos,
4267
                                 unsigned HOST_WIDE_INT bitregion_start,
4268
                                 unsigned HOST_WIDE_INT bitregion_end,
4269
                                 enum machine_mode mode1, rtx str_rtx,
4270
                                 tree to, tree src)
4271
{
4272
  enum machine_mode str_mode = GET_MODE (str_rtx);
4273
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4274
  tree op0, op1;
4275
  rtx value, result;
4276
  optab binop;
4277
  gimple srcstmt;
4278
  enum tree_code code;
4279
 
4280
  if (mode1 != VOIDmode
4281
      || bitsize >= BITS_PER_WORD
4282
      || str_bitsize > BITS_PER_WORD
4283
      || TREE_SIDE_EFFECTS (to)
4284
      || TREE_THIS_VOLATILE (to))
4285
    return false;
4286
 
4287
  STRIP_NOPS (src);
4288
  if (TREE_CODE (src) != SSA_NAME)
4289
    return false;
4290
  if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4291
    return false;
4292
 
4293
  srcstmt = get_gimple_for_ssa_name (src);
4294
  if (!srcstmt
4295
      || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4296
    return false;
4297
 
4298
  code = gimple_assign_rhs_code (srcstmt);
4299
 
4300
  op0 = gimple_assign_rhs1 (srcstmt);
4301
 
4302
  /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4303
     to find its initialization.  Hopefully the initialization will
4304
     be from a bitfield load.  */
4305
  if (TREE_CODE (op0) == SSA_NAME)
4306
    {
4307
      gimple op0stmt = get_gimple_for_ssa_name (op0);
4308
 
4309
      /* We want to eventually have OP0 be the same as TO, which
4310
         should be a bitfield.  */
4311
      if (!op0stmt
4312
          || !is_gimple_assign (op0stmt)
4313
          || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4314
        return false;
4315
      op0 = gimple_assign_rhs1 (op0stmt);
4316
    }
4317
 
4318
  op1 = gimple_assign_rhs2 (srcstmt);
4319
 
4320
  if (!operand_equal_p (to, op0, 0))
4321
    return false;
4322
 
4323
  if (MEM_P (str_rtx))
4324
    {
4325
      unsigned HOST_WIDE_INT offset1;
4326
 
4327
      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4328
        str_mode = word_mode;
4329
      str_mode = get_best_mode (bitsize, bitpos,
4330
                                bitregion_start, bitregion_end,
4331
                                MEM_ALIGN (str_rtx), str_mode, 0);
4332
      if (str_mode == VOIDmode)
4333
        return false;
4334
      str_bitsize = GET_MODE_BITSIZE (str_mode);
4335
 
4336
      offset1 = bitpos;
4337
      bitpos %= str_bitsize;
4338
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4339
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
4340
    }
4341
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4342
    return false;
4343
 
4344
  /* If the bit field covers the whole REG/MEM, store_field
4345
     will likely generate better code.  */
4346
  if (bitsize >= str_bitsize)
4347
    return false;
4348
 
4349
  /* We can't handle fields split across multiple entities.  */
4350
  if (bitpos + bitsize > str_bitsize)
4351
    return false;
4352
 
4353
  if (BYTES_BIG_ENDIAN)
4354
    bitpos = str_bitsize - bitpos - bitsize;
4355
 
4356
  switch (code)
4357
    {
4358
    case PLUS_EXPR:
4359
    case MINUS_EXPR:
4360
      /* For now, just optimize the case of the topmost bitfield
4361
         where we don't need to do any masking and also
4362
         1 bit bitfields where xor can be used.
4363
         We might win by one instruction for the other bitfields
4364
         too if insv/extv instructions aren't used, so that
4365
         can be added later.  */
4366
      if (bitpos + bitsize != str_bitsize
4367
          && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4368
        break;
4369
 
4370
      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4371
      value = convert_modes (str_mode,
4372
                             TYPE_MODE (TREE_TYPE (op1)), value,
4373
                             TYPE_UNSIGNED (TREE_TYPE (op1)));
4374
 
4375
      /* We may be accessing data outside the field, which means
4376
         we can alias adjacent data.  */
4377
      if (MEM_P (str_rtx))
4378
        {
4379
          str_rtx = shallow_copy_rtx (str_rtx);
4380
          set_mem_alias_set (str_rtx, 0);
4381
          set_mem_expr (str_rtx, 0);
4382
        }
4383
 
4384
      binop = code == PLUS_EXPR ? add_optab : sub_optab;
4385
      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4386
        {
4387
          value = expand_and (str_mode, value, const1_rtx, NULL);
4388
          binop = xor_optab;
4389
        }
4390
      value = expand_shift (LSHIFT_EXPR, str_mode, value,
4391
                            bitpos, NULL_RTX, 1);
4392
      result = expand_binop (str_mode, binop, str_rtx,
4393
                             value, str_rtx, 1, OPTAB_WIDEN);
4394
      if (result != str_rtx)
4395
        emit_move_insn (str_rtx, result);
4396
      return true;
4397
 
4398
    case BIT_IOR_EXPR:
4399
    case BIT_XOR_EXPR:
4400
      if (TREE_CODE (op1) != INTEGER_CST)
4401
        break;
4402
      value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4403
      value = convert_modes (GET_MODE (str_rtx),
4404
                             TYPE_MODE (TREE_TYPE (op1)), value,
4405
                             TYPE_UNSIGNED (TREE_TYPE (op1)));
4406
 
4407
      /* We may be accessing data outside the field, which means
4408
         we can alias adjacent data.  */
4409
      if (MEM_P (str_rtx))
4410
        {
4411
          str_rtx = shallow_copy_rtx (str_rtx);
4412
          set_mem_alias_set (str_rtx, 0);
4413
          set_mem_expr (str_rtx, 0);
4414
        }
4415
 
4416
      binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4417
      if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4418
        {
4419
          rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4420
                              - 1);
4421
          value = expand_and (GET_MODE (str_rtx), value, mask,
4422
                              NULL_RTX);
4423
        }
4424
      value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4425
                            bitpos, NULL_RTX, 1);
4426
      result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4427
                             value, str_rtx, 1, OPTAB_WIDEN);
4428
      if (result != str_rtx)
4429
        emit_move_insn (str_rtx, result);
4430
      return true;
4431
 
4432
    default:
4433
      break;
4434
    }
4435
 
4436
  return false;
4437
}
4438
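
/* Editor's note: illustrative only, not part of the original file.
   For a single-bit field, say "s.flag ^= 1" or "s.flag += 1" where the
   field is not the topmost one in its word, the PLUS/MINUS case above
   masks the expanded value with const1_rtx, shifts it to BITPOS and
   applies xor_optab, so the update is a word-sized read, an xor of the
   shifted mask, and a store that leaves the neighbouring fields' bits
   unchanged.  */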
 
4439
/* In the C++ memory model, consecutive bit fields in a structure are
4440
   considered one memory location.
4441
 
4442
   Given a COMPONENT_REF, this function returns the bit range of
4443
   consecutive bits in which this COMPONENT_REF belongs.  The
4444
   values are returned in *BITSTART and *BITEND.  If either the C++
4445
   memory model is not activated, or this memory access is not thread
4446
   visible, 0 is returned in *BITSTART and *BITEND.
4447
 
4448
   EXP is the COMPONENT_REF.
4449
   INNERDECL is the actual object being referenced.
4450
   BITPOS is the position in bits where the bit starts within the structure.
4451
   BITSIZE is size in bits of the field being referenced in EXP.
4452
 
4453
   For example, while storing into FOO.A here...
4454
 
4455
      struct {
4456
        BIT 0:
4457
          unsigned int a : 4;
4458
          unsigned int b : 1;
4459
        BIT 8:
4460
          unsigned char c;
4461
          unsigned int d : 6;
4462
      } foo;
4463
 
4464
   ...we are not allowed to store past <b>, so for the layout above the
4465
   returned range is 0..7 (because no one cares if we store into the
4466
   padding).  */
4467
 
4468
static void
4469
get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4470
               unsigned HOST_WIDE_INT *bitend,
4471
               tree exp, tree innerdecl,
4472
               HOST_WIDE_INT bitpos, HOST_WIDE_INT bitsize)
4473
{
4474
  tree field, record_type, fld;
4475
  bool found_field = false;
4476
  bool prev_field_is_bitfield;
4477
 
4478
  gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4479
 
4480
  /* If other threads can't see this value, no need to restrict stores.  */
4481
  if (ALLOW_STORE_DATA_RACES
4482
      || ((TREE_CODE (innerdecl) == MEM_REF
4483
           || TREE_CODE (innerdecl) == TARGET_MEM_REF)
4484
          && !ptr_deref_may_alias_global_p (TREE_OPERAND (innerdecl, 0)))
4485
      || (DECL_P (innerdecl)
4486
          && ((TREE_CODE (innerdecl) == VAR_DECL
4487
               && DECL_THREAD_LOCAL_P (innerdecl))
4488
              || !TREE_STATIC (innerdecl))))
4489
    {
4490
      *bitstart = *bitend = 0;
4491
      return;
4492
    }
4493
 
4494
  /* Bit field we're storing into.  */
4495
  field = TREE_OPERAND (exp, 1);
4496
  record_type = DECL_FIELD_CONTEXT (field);
4497
 
4498
  /* Count the contiguous bitfields for the memory location that
4499
     contains FIELD.  */
4500
  *bitstart = 0;
4501
  prev_field_is_bitfield = true;
4502
  for (fld = TYPE_FIELDS (record_type); fld; fld = DECL_CHAIN (fld))
4503
    {
4504
      tree t, offset;
4505
      enum machine_mode mode;
4506
      int unsignedp, volatilep;
4507
 
4508
      if (TREE_CODE (fld) != FIELD_DECL)
4509
        continue;
4510
 
4511
      t = build3 (COMPONENT_REF, TREE_TYPE (exp),
4512
                  unshare_expr (TREE_OPERAND (exp, 0)),
4513
                  fld, NULL_TREE);
4514
      get_inner_reference (t, &bitsize, &bitpos, &offset,
4515
                           &mode, &unsignedp, &volatilep, true);
4516
 
4517
      if (field == fld)
4518
        found_field = true;
4519
 
4520
      if (DECL_BIT_FIELD_TYPE (fld) && bitsize > 0)
4521
        {
4522
          if (prev_field_is_bitfield == false)
4523
            {
4524
              *bitstart = bitpos;
4525
              prev_field_is_bitfield = true;
4526
            }
4527
        }
4528
      else
4529
        {
4530
          prev_field_is_bitfield = false;
4531
          if (found_field)
4532
            break;
4533
        }
4534
    }
4535
  gcc_assert (found_field);
4536
 
4537
  if (fld)
4538
    {
4539
      /* We found the end of the bit field sequence.  Include the
4540
         padding up to the next field and be done.  */
4541
      *bitend = bitpos - 1;
4542
    }
4543
  else
4544
    {
4545
      /* If this is the last element in the structure, include the padding
4546
         at the end of structure.  */
4547
      *bitend = TREE_INT_CST_LOW (TYPE_SIZE (record_type)) - 1;
4548
    }
4549
}
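/* Illustrative walk-through (an editorial addition, not part of the
   original source): for the struct FOO shown in the comment above,
   storing into FOO.A gives BITPOS 0 and BITSIZE 4.  The loop sees A
   and B as contiguous bit-fields starting at *BITSTART = 0; the first
   non-bit-field C begins at bit 8, so the sequence ends there and
   *BITEND = 7.  A store to FOO.A may therefore touch bits 0..7 but
   must not read or write byte 1, where C lives.  */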
4550
 
4551
/* Returns true if the MEM_REF REF refers to an object that does not
4552
   reside in memory and has non-BLKmode.  */
4553
 
4554
static bool
4555
mem_ref_refers_to_non_mem_p (tree ref)
4556
{
4557
  tree base = TREE_OPERAND (ref, 0);
4558
  if (TREE_CODE (base) != ADDR_EXPR)
4559
    return false;
4560
  base = TREE_OPERAND (base, 0);
4561
  return (DECL_P (base)
4562
          && !TREE_ADDRESSABLE (base)
4563
          && DECL_MODE (base) != BLKmode
4564
          && DECL_RTL_SET_P (base)
4565
          && !MEM_P (DECL_RTL (base)));
4566
}
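/* Illustrative example (an editorial addition, not part of the original
   source): given

     int x;              (a local whose address never otherwise escapes)
     ... MEM[(int *)&x] ...

   X is likely allocated to a pseudo register, so DECL_RTL (X) is a REG
   rather than a MEM and the predicate above returns true; the MEM_REF
   is then treated as a (partial) store into a non-memory object.  */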
4567
 
4568
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
4569
   is true, try generating a nontemporal store.  */
4570
 
4571
void
4572
expand_assignment (tree to, tree from, bool nontemporal)
4573
{
4574
  rtx to_rtx = 0;
4575
  rtx result;
4576
  enum machine_mode mode;
4577
  unsigned int align;
4578
  enum insn_code icode;
4579
 
4580
  /* Don't crash if the lhs of the assignment was erroneous.  */
4581
  if (TREE_CODE (to) == ERROR_MARK)
4582
    {
4583
      expand_normal (from);
4584
      return;
4585
    }
4586
 
4587
  /* Optimize away no-op moves without side-effects.  */
4588
  if (operand_equal_p (to, from, 0))
4589
    return;
4590
 
4591
  /* Handle misaligned stores.  */
4592
  mode = TYPE_MODE (TREE_TYPE (to));
4593
  if ((TREE_CODE (to) == MEM_REF
4594
       || TREE_CODE (to) == TARGET_MEM_REF)
4595
      && mode != BLKmode
4596
      && ((align = get_object_or_type_alignment (to))
4597
          < GET_MODE_ALIGNMENT (mode))
4598
      && ((icode = optab_handler (movmisalign_optab, mode))
4599
          != CODE_FOR_nothing))
4600
    {
4601
      addr_space_t as
4602
        = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
4603
      struct expand_operand ops[2];
4604
      enum machine_mode address_mode;
4605
      rtx reg, op0, mem;
4606
 
4607
      reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4608
      reg = force_not_mem (reg);
4609
 
4610
      if (TREE_CODE (to) == MEM_REF)
4611
        {
4612
          tree base = TREE_OPERAND (to, 0);
4613
          address_mode = targetm.addr_space.address_mode (as);
4614
          op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4615
          op0 = convert_memory_address_addr_space (address_mode, op0, as);
4616
          if (!integer_zerop (TREE_OPERAND (to, 1)))
4617
            {
4618
              rtx off
4619
                = immed_double_int_const (mem_ref_offset (to), address_mode);
4620
              op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
4621
            }
4622
          op0 = memory_address_addr_space (mode, op0, as);
4623
          mem = gen_rtx_MEM (mode, op0);
4624
          set_mem_attributes (mem, to, 0);
4625
          set_mem_addr_space (mem, as);
4626
        }
4627
      else if (TREE_CODE (to) == TARGET_MEM_REF)
4628
        {
4629
          struct mem_address addr;
4630
          get_address_description (to, &addr);
4631
          op0 = addr_for_mem_ref (&addr, as, true);
4632
          op0 = memory_address_addr_space (mode, op0, as);
4633
          mem = gen_rtx_MEM (mode, op0);
4634
          set_mem_attributes (mem, to, 0);
4635
          set_mem_addr_space (mem, as);
4636
        }
4637
      else
4638
        gcc_unreachable ();
4639
      if (TREE_THIS_VOLATILE (to))
4640
        MEM_VOLATILE_P (mem) = 1;
4641
 
4642
      create_fixed_operand (&ops[0], mem);
4643
      create_input_operand (&ops[1], reg, mode);
4644
      /* The movmisalign<mode> pattern cannot fail, else the assignment would
4645
         silently be omitted.  */
4646
      expand_insn (icode, 2, ops);
4647
      return;
4648
    }
4649
 
4650
  /* Assignment of a structure component needs special treatment
4651
     if the structure component's rtx is not simply a MEM.
4652
     Assignment of an array element at a constant index, and assignment of
4653
     an array element in an unaligned packed structure field, have the same
4654
     problem.  Same for (partially) storing into a non-memory object.  */
4655
  if (handled_component_p (to)
4656
      || (TREE_CODE (to) == MEM_REF
4657
          && mem_ref_refers_to_non_mem_p (to))
4658
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4659
    {
4660
      enum machine_mode mode1;
4661
      HOST_WIDE_INT bitsize, bitpos;
4662
      unsigned HOST_WIDE_INT bitregion_start = 0;
4663
      unsigned HOST_WIDE_INT bitregion_end = 0;
4664
      tree offset;
4665
      int unsignedp;
4666
      int volatilep = 0;
4667
      tree tem;
4668
      bool misalignp;
4669
      rtx mem = NULL_RTX;
4670
 
4671
      push_temp_slots ();
4672
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4673
                                 &unsignedp, &volatilep, true);
4674
 
4675
      if (TREE_CODE (to) == COMPONENT_REF
4676
          && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4677
        get_bit_range (&bitregion_start, &bitregion_end,
4678
                       to, tem, bitpos, bitsize);
4679
 
4680
      /* If we are going to use store_bit_field and extract_bit_field,
4681
         make sure to_rtx will be safe for multiple use.  */
4682
      mode = TYPE_MODE (TREE_TYPE (tem));
4683
      if (TREE_CODE (tem) == MEM_REF
4684
          && mode != BLKmode
4685
          && ((align = get_object_or_type_alignment (tem))
4686
              < GET_MODE_ALIGNMENT (mode))
4687
          && ((icode = optab_handler (movmisalign_optab, mode))
4688
              != CODE_FOR_nothing))
4689
        {
4690
          enum machine_mode address_mode;
4691
          rtx op0;
4692
          struct expand_operand ops[2];
4693
          addr_space_t as = TYPE_ADDR_SPACE
4694
              (TREE_TYPE (TREE_TYPE (TREE_OPERAND (tem, 0))));
4695
          tree base = TREE_OPERAND (tem, 0);
4696
 
4697
          misalignp = true;
4698
          to_rtx = gen_reg_rtx (mode);
4699
 
4700
          address_mode = targetm.addr_space.address_mode (as);
4701
          op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4702
          op0 = convert_memory_address_addr_space (address_mode, op0, as);
4703
          if (!integer_zerop (TREE_OPERAND (tem, 1)))
4704
            {
4705
              rtx off = immed_double_int_const (mem_ref_offset (tem),
4706
                                                address_mode);
4707
              op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
4708
            }
4709
          op0 = memory_address_addr_space (mode, op0, as);
4710
          mem = gen_rtx_MEM (mode, op0);
4711
          set_mem_attributes (mem, tem, 0);
4712
          set_mem_addr_space (mem, as);
4713
          if (TREE_THIS_VOLATILE (tem))
4714
            MEM_VOLATILE_P (mem) = 1;
4715
 
4716
          /* If the misaligned store doesn't overwrite all bits, perform
4717
             a read-modify-write cycle on MEM.  */
4718
          if (bitsize != GET_MODE_BITSIZE (mode))
4719
            {
4720
              create_input_operand (&ops[0], to_rtx, mode);
4721
              create_fixed_operand (&ops[1], mem);
4722
              /* The movmisalign<mode> pattern cannot fail, else the assignment
4723
                 would silently be omitted.  */
4724
              expand_insn (icode, 2, ops);
4725
 
4726
              mem = copy_rtx (mem);
4727
            }
4728
        }
4729
      else
4730
        {
4731
          misalignp = false;
4732
          to_rtx = expand_normal (tem);
4733
        }
4734
 
4735
      /* If the bitfield is volatile, we want to access it in the
4736
         field's mode, not the computed mode.
4737
         If a MEM has VOIDmode (external with incomplete type),
4738
         use BLKmode for it instead.  */
4739
      if (MEM_P (to_rtx))
4740
        {
4741
          if (volatilep && flag_strict_volatile_bitfields > 0)
4742
            to_rtx = adjust_address (to_rtx, mode1, 0);
4743
          else if (GET_MODE (to_rtx) == VOIDmode)
4744
            to_rtx = adjust_address (to_rtx, BLKmode, 0);
4745
        }
4746
 
4747
      if (offset != 0)
4748
        {
4749
          enum machine_mode address_mode;
4750
          rtx offset_rtx;
4751
 
4752
          if (!MEM_P (to_rtx))
4753
            {
4754
              /* We can get constant negative offsets into arrays with broken
4755
                 user code.  Translate this to a trap instead of ICEing.  */
4756
              gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4757
              expand_builtin_trap ();
4758
              to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4759
            }
4760
 
4761
          offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4762
          address_mode
4763
            = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4764
          if (GET_MODE (offset_rtx) != address_mode)
4765
            offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4766
 
4767
          /* A constant address in TO_RTX can have VOIDmode; we must not try
4768
             to call force_reg in that case, so avoid it.  */
4769
          if (MEM_P (to_rtx)
4770
              && GET_MODE (to_rtx) == BLKmode
4771
              && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4772
              && bitsize > 0
4773
              && (bitpos % bitsize) == 0
4774
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4775
              && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4776
            {
4777
              to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4778
              bitpos = 0;
4779
            }
4780
 
4781
          to_rtx = offset_address (to_rtx, offset_rtx,
4782
                                   highest_pow2_factor_for_target (to,
4783
                                                                   offset));
4784
        }
4785
 
4786
      /* No action is needed if the target is not a memory and the field
4787
         lies completely outside that target.  This can occur if the source
4788
         code contains an out-of-bounds access to a small array.  */
4789
      if (!MEM_P (to_rtx)
4790
          && GET_MODE (to_rtx) != BLKmode
4791
          && (unsigned HOST_WIDE_INT) bitpos
4792
             >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4793
        {
4794
          expand_normal (from);
4795
          result = NULL;
4796
        }
4797
      /* Handle expand_expr of a complex value returning a CONCAT.  */
4798
      else if (GET_CODE (to_rtx) == CONCAT)
4799
        {
4800
          unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4801
          if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4802
              && bitpos == 0
4803
              && bitsize == mode_bitsize)
4804
            result = store_expr (from, to_rtx, false, nontemporal);
4805
          else if (bitsize == mode_bitsize / 2
4806
                   && (bitpos == 0 || bitpos == mode_bitsize / 2))
4807
            result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4808
                                 nontemporal);
4809
          else if (bitpos + bitsize <= mode_bitsize / 2)
4810
            result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4811
                                  bitregion_start, bitregion_end,
4812
                                  mode1, from, TREE_TYPE (tem),
4813
                                  get_alias_set (to), nontemporal);
4814
          else if (bitpos >= mode_bitsize / 2)
4815
            result = store_field (XEXP (to_rtx, 1), bitsize,
4816
                                  bitpos - mode_bitsize / 2,
4817
                                  bitregion_start, bitregion_end,
4818
                                  mode1, from,
4819
                                  TREE_TYPE (tem), get_alias_set (to),
4820
                                  nontemporal);
4821
          else if (bitpos == 0 && bitsize == mode_bitsize)
4822
            {
4823
              rtx from_rtx;
4824
              result = expand_normal (from);
4825
              from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4826
                                              TYPE_MODE (TREE_TYPE (from)), 0);
4827
              emit_move_insn (XEXP (to_rtx, 0),
4828
                              read_complex_part (from_rtx, false));
4829
              emit_move_insn (XEXP (to_rtx, 1),
4830
                              read_complex_part (from_rtx, true));
4831
            }
4832
          else
4833
            {
4834
              rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4835
                                            GET_MODE_SIZE (GET_MODE (to_rtx)),
4836
                                            0);
4837
              write_complex_part (temp, XEXP (to_rtx, 0), false);
4838
              write_complex_part (temp, XEXP (to_rtx, 1), true);
4839
              result = store_field (temp, bitsize, bitpos,
4840
                                    bitregion_start, bitregion_end,
4841
                                    mode1, from,
4842
                                    TREE_TYPE (tem), get_alias_set (to),
4843
                                    nontemporal);
4844
              emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4845
              emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4846
            }
4847
        }
4848
      else
4849
        {
4850
          if (MEM_P (to_rtx))
4851
            {
4852
              /* If the field is at offset zero, we could have been given the
4853
                 DECL_RTX of the parent struct.  Don't munge it.  */
4854
              to_rtx = shallow_copy_rtx (to_rtx);
4855
 
4856
              set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4857
 
4858
              /* Deal with volatile and readonly fields.  The former is only
4859
                 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
4860
              if (volatilep)
4861
                MEM_VOLATILE_P (to_rtx) = 1;
4862
              if (component_uses_parent_alias_set (to))
4863
                MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4864
            }
4865
 
4866
          if (optimize_bitfield_assignment_op (bitsize, bitpos,
4867
                                               bitregion_start, bitregion_end,
4868
                                               mode1,
4869
                                               to_rtx, to, from))
4870
            result = NULL;
4871
          else
4872
            result = store_field (to_rtx, bitsize, bitpos,
4873
                                  bitregion_start, bitregion_end,
4874
                                  mode1, from,
4875
                                  TREE_TYPE (tem), get_alias_set (to),
4876
                                  nontemporal);
4877
        }
4878
 
4879
      if (misalignp)
4880
        {
4881
          struct expand_operand ops[2];
4882
 
4883
          create_fixed_operand (&ops[0], mem);
4884
          create_input_operand (&ops[1], to_rtx, mode);
4885
          /* The movmisalign<mode> pattern cannot fail, else the assignment
4886
             would silently be omitted.  */
4887
          expand_insn (icode, 2, ops);
4888
        }
4889
 
4890
      if (result)
4891
        preserve_temp_slots (result);
4892
      free_temp_slots ();
4893
      pop_temp_slots ();
4894
      return;
4895
    }
4896
 
4897
  /* If the rhs is a function call and its value is not an aggregate,
4898
     call the function before we start to compute the lhs.
4899
     This is needed for correct code for cases such as
4900
     val = setjmp (buf) on machines where reference to val
4901
     requires loading up part of an address in a separate insn.
4902
 
4903
     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4904
     since it might be a promoted variable where the zero- or sign-extension
4905
     needs to be done.  Handling this in the normal way is safe because no
4906
     computation is done before the call.  The same is true for SSA names.  */
4907
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4908
      && COMPLETE_TYPE_P (TREE_TYPE (from))
4909
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4910
      && ! (((TREE_CODE (to) == VAR_DECL
4911
              || TREE_CODE (to) == PARM_DECL
4912
              || TREE_CODE (to) == RESULT_DECL)
4913
             && REG_P (DECL_RTL (to)))
4914
            || TREE_CODE (to) == SSA_NAME))
4915
    {
4916
      rtx value;
4917
 
4918
      push_temp_slots ();
4919
      value = expand_normal (from);
4920
      if (to_rtx == 0)
4921
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4922
 
4923
      /* Handle calls that return values in multiple non-contiguous locations.
4924
         The Irix 6 ABI has examples of this.  */
4925
      if (GET_CODE (to_rtx) == PARALLEL)
4926
        emit_group_load (to_rtx, value, TREE_TYPE (from),
4927
                         int_size_in_bytes (TREE_TYPE (from)));
4928
      else if (GET_MODE (to_rtx) == BLKmode)
4929
        emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4930
      else
4931
        {
4932
          if (POINTER_TYPE_P (TREE_TYPE (to)))
4933
            value = convert_memory_address_addr_space
4934
                      (GET_MODE (to_rtx), value,
4935
                       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4936
 
4937
          emit_move_insn (to_rtx, value);
4938
        }
4939
      preserve_temp_slots (to_rtx);
4940
      free_temp_slots ();
4941
      pop_temp_slots ();
4942
      return;
4943
    }
4944
 
4945
  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
4946
  to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4947
 
4948
  /* Don't move directly into a return register.  */
4949
  if (TREE_CODE (to) == RESULT_DECL
4950
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4951
    {
4952
      rtx temp;
4953
 
4954
      push_temp_slots ();
4955
      if (REG_P (to_rtx) && TYPE_MODE (TREE_TYPE (from)) == BLKmode)
4956
        temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
4957
      else
4958
        temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4959
 
4960
      if (GET_CODE (to_rtx) == PARALLEL)
4961
        emit_group_load (to_rtx, temp, TREE_TYPE (from),
4962
                         int_size_in_bytes (TREE_TYPE (from)));
4963
      else if (temp)
4964
        emit_move_insn (to_rtx, temp);
4965
 
4966
      preserve_temp_slots (to_rtx);
4967
      free_temp_slots ();
4968
      pop_temp_slots ();
4969
      return;
4970
    }
4971
 
4972
  /* In case we are returning the contents of an object which overlaps
4973
     the place the value is being stored, use a safe function when copying
4974
     a value through a pointer into a structure value return block.  */
4975
  if (TREE_CODE (to) == RESULT_DECL
4976
      && TREE_CODE (from) == INDIRECT_REF
4977
      && ADDR_SPACE_GENERIC_P
4978
           (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4979
      && refs_may_alias_p (to, from)
4980
      && cfun->returns_struct
4981
      && !cfun->returns_pcc_struct)
4982
    {
4983
      rtx from_rtx, size;
4984
 
4985
      push_temp_slots ();
4986
      size = expr_size (from);
4987
      from_rtx = expand_normal (from);
4988
 
4989
      emit_library_call (memmove_libfunc, LCT_NORMAL,
4990
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4991
                         XEXP (from_rtx, 0), Pmode,
4992
                         convert_to_mode (TYPE_MODE (sizetype),
4993
                                          size, TYPE_UNSIGNED (sizetype)),
4994
                         TYPE_MODE (sizetype));
4995
 
4996
      preserve_temp_slots (to_rtx);
4997
      free_temp_slots ();
4998
      pop_temp_slots ();
4999
      return;
5000
    }
5001
 
5002
  /* Compute FROM and store the value in the rtx we got.  */
5003
 
5004
  push_temp_slots ();
5005
  result = store_expr (from, to_rtx, 0, nontemporal);
5006
  preserve_temp_slots (result);
5007
  free_temp_slots ();
5008
  pop_temp_slots ();
5009
  return;
5010
}
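/* Illustrative note (an editorial addition, not part of the original
   source): for a bit-field store such as

     foo.a = 1;      (FOO as laid out in the example above get_bit_range)

   expand_assignment takes the handled_component_p branch, calls
   get_inner_reference and, because A has DECL_BIT_FIELD_TYPE, also
   get_bit_range to obtain the 0..7 bit region; store_field (or
   optimize_bitfield_assignment_op) then emits a read-modify-write that
   stays within that region.  */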
5011
 
5012
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
5013
   succeeded, false otherwise.  */
5014
 
5015
bool
5016
emit_storent_insn (rtx to, rtx from)
5017
{
5018
  struct expand_operand ops[2];
5019
  enum machine_mode mode = GET_MODE (to);
5020
  enum insn_code code = optab_handler (storent_optab, mode);
5021
 
5022
  if (code == CODE_FOR_nothing)
5023
    return false;
5024
 
5025
  create_fixed_operand (&ops[0], to);
5026
  create_input_operand (&ops[1], from, mode);
5027
  return maybe_expand_insn (code, 2, ops);
5028
}
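/* Usage note (an editorial addition, not part of the original source):
   store_expr tries this helper first when its NONTEMPORAL argument is
   true; if the target provides no storent pattern for the mode,
   optab_handler returns CODE_FOR_nothing above and the caller falls
   back to an ordinary move.  */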
5029
 
5030
/* Generate code for computing expression EXP,
5031
   and storing the value into TARGET.
5032
 
5033
   If the mode is BLKmode then we may return TARGET itself.
5034
   It turns out that in BLKmode it doesn't cause a problem,
5035
   because C has no operators that could combine two different
5036
   assignments into the same BLKmode object with different values
5037
   with no sequence point.  Will other languages need this to
5038
   be more thorough?
5039
 
5040
   If CALL_PARAM_P is nonzero, this is a store into a call param on the
5041
   stack, and block moves may need to be treated specially.
5042
 
5043
   If NONTEMPORAL is true, try using a nontemporal store instruction.  */
5044
 
5045
rtx
5046
store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5047
{
5048
  rtx temp;
5049
  rtx alt_rtl = NULL_RTX;
5050
  location_t loc = EXPR_LOCATION (exp);
5051
 
5052
  if (VOID_TYPE_P (TREE_TYPE (exp)))
5053
    {
5054
      /* C++ can generate ?: expressions with a throw expression in one
5055
         branch and an rvalue in the other. Here, we resolve attempts to
5056
         store the throw expression's nonexistent result.  */
5057
      gcc_assert (!call_param_p);
5058
      expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5059
      return NULL_RTX;
5060
    }
5061
  if (TREE_CODE (exp) == COMPOUND_EXPR)
5062
    {
5063
      /* Perform first part of compound expression, then assign from second
5064
         part.  */
5065
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5066
                   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5067
      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5068
                         nontemporal);
5069
    }
5070
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5071
    {
5072
      /* For conditional expression, get safe form of the target.  Then
5073
         test the condition, doing the appropriate assignment on either
5074
         side.  This avoids the creation of unnecessary temporaries.
5075
         For non-BLKmode, it is more efficient not to do this.  */
5076
 
5077
      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5078
 
5079
      do_pending_stack_adjust ();
5080
      NO_DEFER_POP;
5081
      jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5082
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5083
                  nontemporal);
5084
      emit_jump_insn (gen_jump (lab2));
5085
      emit_barrier ();
5086
      emit_label (lab1);
5087
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5088
                  nontemporal);
5089
      emit_label (lab2);
5090
      OK_DEFER_POP;
5091
 
5092
      return NULL_RTX;
5093
    }
5094
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5095
    /* If this is a scalar in a register that is stored in a wider mode
5096
       than the declared mode, compute the result into its declared mode
5097
       and then convert to the wider mode.  Our value is the computed
5098
       expression.  */
5099
    {
5100
      rtx inner_target = 0;
5101
 
5102
      /* We can do the conversion inside EXP, which will often result
5103
         in some optimizations.  Do the conversion in two steps: first
5104
         change the signedness, if needed, then the extend.  But don't
5105
         do this if the type of EXP is a subtype of something else
5106
         since then the conversion might involve more than just
5107
         converting modes.  */
5108
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5109
          && TREE_TYPE (TREE_TYPE (exp)) == 0
5110
          && GET_MODE_PRECISION (GET_MODE (target))
5111
             == TYPE_PRECISION (TREE_TYPE (exp)))
5112
        {
5113
          if (TYPE_UNSIGNED (TREE_TYPE (exp))
5114
              != SUBREG_PROMOTED_UNSIGNED_P (target))
5115
            {
5116
              /* Some types, e.g. Fortran's logical*4, won't have a signed
5117
                 version, so use the mode instead.  */
5118
              tree ntype
5119
                = (signed_or_unsigned_type_for
5120
                   (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5121
              if (ntype == NULL)
5122
                ntype = lang_hooks.types.type_for_mode
5123
                  (TYPE_MODE (TREE_TYPE (exp)),
5124
                   SUBREG_PROMOTED_UNSIGNED_P (target));
5125
 
5126
              exp = fold_convert_loc (loc, ntype, exp);
5127
            }
5128
 
5129
          exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5130
                                  (GET_MODE (SUBREG_REG (target)),
5131
                                   SUBREG_PROMOTED_UNSIGNED_P (target)),
5132
                                  exp);
5133
 
5134
          inner_target = SUBREG_REG (target);
5135
        }
5136
 
5137
      temp = expand_expr (exp, inner_target, VOIDmode,
5138
                          call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5139
 
5140
      /* If TEMP is a VOIDmode constant, use convert_modes to make
5141
         sure that we properly convert it.  */
5142
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5143
        {
5144
          temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5145
                                temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5146
          temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5147
                                GET_MODE (target), temp,
5148
                                SUBREG_PROMOTED_UNSIGNED_P (target));
5149
        }
5150
 
5151
      convert_move (SUBREG_REG (target), temp,
5152
                    SUBREG_PROMOTED_UNSIGNED_P (target));
5153
 
5154
      return NULL_RTX;
5155
    }
5156
  else if ((TREE_CODE (exp) == STRING_CST
5157
            || (TREE_CODE (exp) == MEM_REF
5158
                && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5159
                && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5160
                   == STRING_CST
5161
                && integer_zerop (TREE_OPERAND (exp, 1))))
5162
           && !nontemporal && !call_param_p
5163
           && MEM_P (target))
5164
    {
5165
      /* Optimize initialization of an array with a STRING_CST.  */
5166
      HOST_WIDE_INT exp_len, str_copy_len;
5167
      rtx dest_mem;
5168
      tree str = TREE_CODE (exp) == STRING_CST
5169
                 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5170
 
5171
      exp_len = int_expr_size (exp);
5172
      if (exp_len <= 0)
5173
        goto normal_expr;
5174
 
5175
      if (TREE_STRING_LENGTH (str) <= 0)
5176
        goto normal_expr;
5177
 
5178
      str_copy_len = strlen (TREE_STRING_POINTER (str));
5179
      if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5180
        goto normal_expr;
5181
 
5182
      str_copy_len = TREE_STRING_LENGTH (str);
5183
      if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5184
          && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5185
        {
5186
          str_copy_len += STORE_MAX_PIECES - 1;
5187
          str_copy_len &= ~(STORE_MAX_PIECES - 1);
5188
        }
5189
      str_copy_len = MIN (str_copy_len, exp_len);
5190
      if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5191
                                CONST_CAST (char *, TREE_STRING_POINTER (str)),
5192
                                MEM_ALIGN (target), false))
5193
        goto normal_expr;
5194
 
5195
      dest_mem = target;
5196
 
5197
      dest_mem = store_by_pieces (dest_mem,
5198
                                  str_copy_len, builtin_strncpy_read_str,
5199
                                  CONST_CAST (char *,
5200
                                              TREE_STRING_POINTER (str)),
5201
                                  MEM_ALIGN (target), false,
5202
                                  exp_len > str_copy_len ? 1 : 0);
5203
      if (exp_len > str_copy_len)
5204
        clear_storage (adjust_address (dest_mem, BLKmode, 0),
5205
                       GEN_INT (exp_len - str_copy_len),
5206
                       BLOCK_OP_NORMAL);
5207
      return NULL_RTX;
5208
    }
5209
  else
5210
    {
5211
      rtx tmp_target;
5212
 
5213
  normal_expr:
5214
      /* If we want to use a nontemporal store, force the value to
5215
         a register first.  */
5216
      tmp_target = nontemporal ? NULL_RTX : target;
5217
      temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5218
                               (call_param_p
5219
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5220
                               &alt_rtl);
5221
    }
5222
 
5223
  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5224
     the same as that of TARGET, adjust the constant.  This is needed, for
5225
     example, in case it is a CONST_DOUBLE and we want only a word-sized
5226
     value.  */
5227
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5228
      && TREE_CODE (exp) != ERROR_MARK
5229
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5230
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5231
                          temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5232
 
5233
  /* If value was not generated in the target, store it there.
5234
     Convert the value to TARGET's type first if necessary and emit the
5235
     pending incrementations that have been queued when expanding EXP.
5236
     Note that we cannot emit the whole queue blindly because this will
5237
     effectively disable the POST_INC optimization later.
5238
 
5239
     If TEMP and TARGET compare equal according to rtx_equal_p, but
5240
     one or both of them are volatile memory refs, we have to distinguish
5241
     two cases:
5242
     - expand_expr has used TARGET.  In this case, we must not generate
5243
       another copy.  This can be detected by TARGET being equal according
5244
       to == .
5245
     - expand_expr has not used TARGET - that means that the source just
5246
       happens to have the same RTX form.  Since temp will have been created
5247
       by expand_expr, it will compare unequal according to == .
5248
       We must generate a copy in this case, to reach the correct number
5249
       of volatile memory references.  */
5250
 
5251
  if ((! rtx_equal_p (temp, target)
5252
       || (temp != target && (side_effects_p (temp)
5253
                              || side_effects_p (target))))
5254
      && TREE_CODE (exp) != ERROR_MARK
5255
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5256
         but TARGET is not valid memory reference, TEMP will differ
5257
         from TARGET although it is really the same location.  */
5258
      && !(alt_rtl
5259
           && rtx_equal_p (alt_rtl, target)
5260
           && !side_effects_p (alt_rtl)
5261
           && !side_effects_p (target))
5262
      /* If there's nothing to copy, don't bother.  Don't call
5263
         expr_size unless necessary, because the expr_size hook of some
5264
         front ends (C++) must not be given objects that are not
5265
         supposed to be bit-copied or bit-initialized.  */
5266
      && expr_size (exp) != const0_rtx)
5267
    {
5268
      if (GET_MODE (temp) != GET_MODE (target)
5269
          && GET_MODE (temp) != VOIDmode)
5270
        {
5271
          int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5272
          if (GET_MODE (target) == BLKmode
5273
              && GET_MODE (temp) == BLKmode)
5274
            emit_block_move (target, temp, expr_size (exp),
5275
                             (call_param_p
5276
                              ? BLOCK_OP_CALL_PARM
5277
                              : BLOCK_OP_NORMAL));
5278
          else if (GET_MODE (target) == BLKmode)
5279
            store_bit_field (target, INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5280
                             0, 0, 0, GET_MODE (temp), temp);
5281
          else
5282
            convert_move (target, temp, unsignedp);
5283
        }
5284
 
5285
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5286
        {
5287
          /* Handle copying a string constant into an array.  The string
5288
             constant may be shorter than the array.  So copy just the string's
5289
             actual length, and clear the rest.  First get the size of the data
5290
             type of the string, which is actually the size of the target.  */
5291
          rtx size = expr_size (exp);
5292
 
5293
          if (CONST_INT_P (size)
5294
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
5295
            emit_block_move (target, temp, size,
5296
                             (call_param_p
5297
                              ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5298
          else
5299
            {
5300
              enum machine_mode pointer_mode
5301
                = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5302
              enum machine_mode address_mode
5303
                = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
5304
 
5305
              /* Compute the size of the data to copy from the string.  */
5306
              tree copy_size
5307
                = size_binop_loc (loc, MIN_EXPR,
5308
                                  make_tree (sizetype, size),
5309
                                  size_int (TREE_STRING_LENGTH (exp)));
5310
              rtx copy_size_rtx
5311
                = expand_expr (copy_size, NULL_RTX, VOIDmode,
5312
                               (call_param_p
5313
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5314
              rtx label = 0;
5315
 
5316
              /* Copy that much.  */
5317
              copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5318
                                               TYPE_UNSIGNED (sizetype));
5319
              emit_block_move (target, temp, copy_size_rtx,
5320
                               (call_param_p
5321
                                ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5322
 
5323
              /* Figure out how much is left in TARGET that we have to clear.
5324
                 Do all calculations in pointer_mode.  */
5325
              if (CONST_INT_P (copy_size_rtx))
5326
                {
5327
                  size = plus_constant (size, -INTVAL (copy_size_rtx));
5328
                  target = adjust_address (target, BLKmode,
5329
                                           INTVAL (copy_size_rtx));
5330
                }
5331
              else
5332
                {
5333
                  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5334
                                       copy_size_rtx, NULL_RTX, 0,
5335
                                       OPTAB_LIB_WIDEN);
5336
 
5337
                  if (GET_MODE (copy_size_rtx) != address_mode)
5338
                    copy_size_rtx = convert_to_mode (address_mode,
5339
                                                     copy_size_rtx,
5340
                                                     TYPE_UNSIGNED (sizetype));
5341
 
5342
                  target = offset_address (target, copy_size_rtx,
5343
                                           highest_pow2_factor (copy_size));
5344
                  label = gen_label_rtx ();
5345
                  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5346
                                           GET_MODE (size), 0, label);
5347
                }
5348
 
5349
              if (size != const0_rtx)
5350
                clear_storage (target, size, BLOCK_OP_NORMAL);
5351
 
5352
              if (label)
5353
                emit_label (label);
5354
            }
5355
        }
5356
      /* Handle calls that return values in multiple non-contiguous locations.
5357
         The Irix 6 ABI has examples of this.  */
5358
      else if (GET_CODE (target) == PARALLEL)
5359
        emit_group_load (target, temp, TREE_TYPE (exp),
5360
                         int_size_in_bytes (TREE_TYPE (exp)));
5361
      else if (GET_MODE (temp) == BLKmode)
5362
        emit_block_move (target, temp, expr_size (exp),
5363
                         (call_param_p
5364
                          ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5365
      else if (nontemporal
5366
               && emit_storent_insn (target, temp))
5367
        /* If we managed to emit a nontemporal store, there is nothing else to
5368
           do.  */
5369
        ;
5370
      else
5371
        {
5372
          temp = force_operand (temp, target);
5373
          if (temp != target)
5374
            emit_move_insn (target, temp);
5375
        }
5376
    }
5377
 
5378
  return NULL_RTX;
5379
}
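/* Illustrative example (an editorial addition, not part of the original
   source): for an initialization such as

     char buf[16] = "abc";

   the STRING_CST path near the top of store_expr copies the literal by
   pieces (builtin_strncpy_read_str supplies zero bytes past the end of
   the string, and the copy length may be rounded up for
   store_by_pieces) and then uses clear_storage to zero whatever part
   of BUF remains uninitialized.  */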
5380
 
5381
/* Return true if field F of structure TYPE is a flexible array.  */
5382
 
5383
static bool
5384
flexible_array_member_p (const_tree f, const_tree type)
5385
{
5386
  const_tree tf;
5387
 
5388
  tf = TREE_TYPE (f);
5389
  return (DECL_CHAIN (f) == NULL
5390
          && TREE_CODE (tf) == ARRAY_TYPE
5391
          && TYPE_DOMAIN (tf)
5392
          && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5393
          && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5394
          && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5395
          && int_size_in_bytes (type) >= 0);
5396
}
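/* Illustrative example (an editorial addition, not part of the original
   source): the predicate above matches the classic C99 flexible array
   member, e.g.

     struct packet { int len; unsigned char data[]; };

   DATA is the last field, its ARRAY_TYPE domain has a zero minimum and
   no maximum, and the enclosing struct still has a known constant
   size, so constructors are not expected to initialize it.  */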
5397
 
5398
/* If FOR_CTOR_P, return the number of top-level elements that a constructor
5399
   must have in order for it to completely initialize a value of type TYPE.
5400
   Return -1 if the number isn't known.
5401
 
5402
   If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */
5403
 
5404
static HOST_WIDE_INT
5405
count_type_elements (const_tree type, bool for_ctor_p)
5406
{
5407
  switch (TREE_CODE (type))
5408
    {
5409
    case ARRAY_TYPE:
5410
      {
5411
        tree nelts;
5412
 
5413
        nelts = array_type_nelts (type);
5414
        if (nelts && host_integerp (nelts, 1))
5415
          {
5416
            unsigned HOST_WIDE_INT n;
5417
 
5418
            n = tree_low_cst (nelts, 1) + 1;
5419
            if (n == 0 || for_ctor_p)
5420
              return n;
5421
            else
5422
              return n * count_type_elements (TREE_TYPE (type), false);
5423
          }
5424
        return for_ctor_p ? -1 : 1;
5425
      }
5426
 
5427
    case RECORD_TYPE:
5428
      {
5429
        unsigned HOST_WIDE_INT n;
5430
        tree f;
5431
 
5432
        n = 0;
5433
        for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5434
          if (TREE_CODE (f) == FIELD_DECL)
5435
            {
5436
              if (!for_ctor_p)
5437
                n += count_type_elements (TREE_TYPE (f), false);
5438
              else if (!flexible_array_member_p (f, type))
5439
                /* Don't count flexible arrays, which are not supposed
5440
                   to be initialized.  */
5441
                n += 1;
5442
            }
5443
 
5444
        return n;
5445
      }
5446
 
5447
    case UNION_TYPE:
5448
    case QUAL_UNION_TYPE:
5449
      {
5450
        tree f;
5451
        HOST_WIDE_INT n, m;
5452
 
5453
        gcc_assert (!for_ctor_p);
5454
        /* Estimate the number of scalars in each field and pick the
5455
           maximum.  Other estimates would do instead; the idea is simply
5456
           to make sure that the estimate is not sensitive to the ordering
5457
           of the fields.  */
5458
        n = 1;
5459
        for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5460
          if (TREE_CODE (f) == FIELD_DECL)
5461
            {
5462
              m = count_type_elements (TREE_TYPE (f), false);
5463
              /* If the field doesn't span the whole union, add an extra
5464
                 scalar for the rest.  */
5465
              if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5466
                                    TYPE_SIZE (type)) != 1)
5467
                m++;
5468
              if (n < m)
5469
                n = m;
5470
            }
5471
        return n;
5472
      }
5473
 
5474
    case COMPLEX_TYPE:
5475
      return 2;
5476
 
5477
    case VECTOR_TYPE:
5478
      return TYPE_VECTOR_SUBPARTS (type);
5479
 
5480
    case INTEGER_TYPE:
5481
    case REAL_TYPE:
5482
    case FIXED_POINT_TYPE:
5483
    case ENUMERAL_TYPE:
5484
    case BOOLEAN_TYPE:
5485
    case POINTER_TYPE:
5486
    case OFFSET_TYPE:
5487
    case REFERENCE_TYPE:
5488
    case NULLPTR_TYPE:
5489
      return 1;
5490
 
5491
    case ERROR_MARK:
5492
      return 0;
5493
 
5494
    case VOID_TYPE:
5495
    case METHOD_TYPE:
5496
    case FUNCTION_TYPE:
5497
    case LANG_TYPE:
5498
    default:
5499
      gcc_unreachable ();
5500
    }
5501
}
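/* Worked example (an editorial addition, not part of the original
   source): for

     struct demo { int a; int b[3]; _Complex double c; };

   count_type_elements (TYPE, false) returns 1 + 3*1 + 2 = 6 scalars,
   while count_type_elements (TYPE, true) returns 3, the number of
   top-level FIELD_DECLs a complete constructor must provide.  */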
5502
 
5503
/* Helper for categorize_ctor_elements.  Identical interface.  */
5504
 
5505
static bool
5506
categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5507
                            HOST_WIDE_INT *p_init_elts, bool *p_complete)
5508
{
5509
  unsigned HOST_WIDE_INT idx;
5510
  HOST_WIDE_INT nz_elts, init_elts, num_fields;
5511
  tree value, purpose, elt_type;
5512
 
5513
  /* Whether CTOR is a valid constant initializer, in accordance with what
5514
     initializer_constant_valid_p does.  If inferred from the constructor
5515
     elements, true until proven otherwise.  */
5516
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5517
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5518
 
5519
  nz_elts = 0;
5520
  init_elts = 0;
5521
  num_fields = 0;
5522
  elt_type = NULL_TREE;
5523
 
5524
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5525
    {
5526
      HOST_WIDE_INT mult = 1;
5527
 
5528
      if (TREE_CODE (purpose) == RANGE_EXPR)
5529
        {
5530
          tree lo_index = TREE_OPERAND (purpose, 0);
5531
          tree hi_index = TREE_OPERAND (purpose, 1);
5532
 
5533
          if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
5534
            mult = (tree_low_cst (hi_index, 1)
5535
                    - tree_low_cst (lo_index, 1) + 1);
5536
        }
5537
      num_fields += mult;
5538
      elt_type = TREE_TYPE (value);
5539
 
5540
      switch (TREE_CODE (value))
5541
        {
5542
        case CONSTRUCTOR:
5543
          {
5544
            HOST_WIDE_INT nz = 0, ic = 0;
5545
 
5546
            bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5547
                                                           p_complete);
5548
 
5549
            nz_elts += mult * nz;
5550
            init_elts += mult * ic;
5551
 
5552
            if (const_from_elts_p && const_p)
5553
              const_p = const_elt_p;
5554
          }
5555
          break;
5556
 
5557
        case INTEGER_CST:
5558
        case REAL_CST:
5559
        case FIXED_CST:
5560
          if (!initializer_zerop (value))
5561
            nz_elts += mult;
5562
          init_elts += mult;
5563
          break;
5564
 
5565
        case STRING_CST:
5566
          nz_elts += mult * TREE_STRING_LENGTH (value);
5567
          init_elts += mult * TREE_STRING_LENGTH (value);
5568
          break;
5569
 
5570
        case COMPLEX_CST:
5571
          if (!initializer_zerop (TREE_REALPART (value)))
5572
            nz_elts += mult;
5573
          if (!initializer_zerop (TREE_IMAGPART (value)))
5574
            nz_elts += mult;
5575
          init_elts += mult;
5576
          break;
5577
 
5578
        case VECTOR_CST:
5579
          {
5580
            tree v;
5581
            for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
5582
              {
5583
                if (!initializer_zerop (TREE_VALUE (v)))
5584
                  nz_elts += mult;
5585
                init_elts += mult;
5586
              }
5587
          }
5588
          break;
5589
 
5590
        default:
5591
          {
5592
            HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5593
            nz_elts += mult * tc;
5594
            init_elts += mult * tc;
5595
 
5596
            if (const_from_elts_p && const_p)
5597
              const_p = initializer_constant_valid_p (value, elt_type)
5598
                        != NULL_TREE;
5599
          }
5600
          break;
5601
        }
5602
    }
5603
 
5604
  if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5605
                                                num_fields, elt_type))
5606
    *p_complete = false;
5607
 
5608
  *p_nz_elts += nz_elts;
5609
  *p_init_elts += init_elts;
5610
 
5611
  return const_p;
5612
}
5613
 
5614
/* Examine CTOR to discover:
5615
   * how many scalar fields are set to nonzero values,
5616
     and place it in *P_NZ_ELTS;
5617
   * how many scalar fields in total are in CTOR,
5618
     and place it in *P_INIT_ELTS;
5619
   * whether the constructor is complete -- in the sense that every
5620
     meaningful byte is explicitly given a value --
5621
     and place it in *P_COMPLETE.
5622
 
5623
   Return whether or not CTOR is a valid static constant initializer, the same
5624
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
5625
 
5626
bool
5627
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5628
                          HOST_WIDE_INT *p_init_elts, bool *p_complete)
5629
{
5630
  *p_nz_elts = 0;
5631
  *p_init_elts = 0;
5632
  *p_complete = true;
5633
 
5634
  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5635
}
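/* Worked example (an editorial addition, not part of the original
   source): for

     struct { int x; int y; } v = { 0, 5 };

   the constructor yields *P_NZ_ELTS = 1 (only the store of 5 is
   nonzero), *P_INIT_ELTS = 2, and *P_COMPLETE = true, since both
   fields of the two-element record are explicitly initialized.  */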
5636
 
5637
/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5638
   of which had type LAST_TYPE.  Each element was itself a complete
5639
   initializer, in the sense that every meaningful byte was explicitly
5640
   given a value.  Return true if the same is true for the constructor
5641
   as a whole.  */
5642
 
5643
bool
5644
complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5645
                          const_tree last_type)
5646
{
5647
  if (TREE_CODE (type) == UNION_TYPE
5648
      || TREE_CODE (type) == QUAL_UNION_TYPE)
5649
    {
5650
      if (num_elts == 0)
5651
        return false;
5652
 
5653
      gcc_assert (num_elts == 1 && last_type);
5654
 
5655
      /* ??? We could look at each element of the union, and find the
5656
         largest element, which would avoid comparing the size of the
5657
         initialized element against any tail padding in the union.
5658
         Doesn't seem worth the effort...  */
5659
      return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5660
    }
5661
 
5662
  return count_type_elements (type, true) == num_elts;
5663
}
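/* Illustrative note (an editorial addition, not part of the original
   source): for a union, a single initialized element only counts as
   complete when its size equals the size of the union itself, e.g.

     union u { int i; char c; };

   an initializer for I covers every byte and yields true, while an
   initializer for C leaves the remaining bytes unspecified and yields
   false.  */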
5664
 
5665
/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
5666
 
5667
static int
5668
mostly_zeros_p (const_tree exp)
5669
{
5670
  if (TREE_CODE (exp) == CONSTRUCTOR)
5671
    {
5672
      HOST_WIDE_INT nz_elts, init_elts;
5673
      bool complete_p;
5674
 
5675
      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5676
      return !complete_p || nz_elts < init_elts / 4;
5677
    }
5678
 
5679
  return initializer_zerop (exp);
5680
}
5681
 
5682
/* Return 1 if EXP contains all zeros.  */
5683
 
5684
static int
5685
all_zeros_p (const_tree exp)
5686
{
5687
  if (TREE_CODE (exp) == CONSTRUCTOR)
5688
    {
5689
      HOST_WIDE_INT nz_elts, init_elts;
5690
      bool complete_p;
5691
 
5692
      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5693
      return nz_elts == 0;
5694
    }
5695
 
5696
  return initializer_zerop (exp);
5697
}
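/* Worked example (an editorial addition, not part of the original
   source): for

     int a[8] = { 0, 0, 0, 0, 0, 0, 0, 1 };

   categorize_ctor_elements reports nz_elts = 1 and init_elts = 8, so
   mostly_zeros_p returns 1 (1 < 8/4) while all_zeros_p returns 0.  An
   all-zero initializer list would make both predicates return 1.  */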
5698
 
5699
/* Helper function for store_constructor.
5700
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5701
   TYPE is the type of the CONSTRUCTOR, not the element type.
5702
   CLEARED is as for store_constructor.
5703
   ALIAS_SET is the alias set to use for any stores.
5704
 
5705
   This provides a recursive shortcut back to store_constructor when it isn't
5706
   necessary to go through store_field.  This is so that we can pass through
5707
   the cleared field to let store_constructor know that we may not have to
5708
   clear a substructure if the outer structure has already been cleared.  */
5709
 
5710
static void
5711
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5712
                         HOST_WIDE_INT bitpos, enum machine_mode mode,
5713
                         tree exp, tree type, int cleared,
5714
                         alias_set_type alias_set)
5715
{
5716
  if (TREE_CODE (exp) == CONSTRUCTOR
5717
      /* We can only call store_constructor recursively if the size and
5718
         bit position are on a byte boundary.  */
5719
      && bitpos % BITS_PER_UNIT == 0
5720
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5721
      /* If we have a nonzero bitpos for a register target, then we just
5722
         let store_field do the bitfield handling.  This is unlikely to
5723
         generate unnecessary clear instructions anyway.  */
5724
      && (bitpos == 0 || MEM_P (target)))
5725
    {
5726
      if (MEM_P (target))
5727
        target
5728
          = adjust_address (target,
5729
                            GET_MODE (target) == BLKmode
5730
                            || 0 != (bitpos
5731
                                     % GET_MODE_ALIGNMENT (GET_MODE (target)))
5732
                            ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5733
 
5734
 
5735
      /* Update the alias set, if required.  */
5736
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5737
          && MEM_ALIAS_SET (target) != 0)
5738
        {
5739
          target = copy_rtx (target);
5740
          set_mem_alias_set (target, alias_set);
5741
        }
5742
 
5743
      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5744
    }
5745
  else
5746
    store_field (target, bitsize, bitpos, 0, 0, mode, exp, type, alias_set,
5747
                 false);
5748
}
5749
 
5750
/* Store the value of constructor EXP into the rtx TARGET.
5751
   TARGET is either a REG or a MEM; we know it cannot conflict, since
5752
   safe_from_p has been called.
5753
   CLEARED is true if TARGET is known to have been zeroed.
5754
   SIZE is the number of bytes of TARGET we are allowed to modify: this
5755
   may not be the same as the size of EXP if we are assigning to a field
5756
   which has been packed to exclude padding bits.  */
5757
 
5758
static void
5759
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5760
{
5761
  tree type = TREE_TYPE (exp);
5762
#ifdef WORD_REGISTER_OPERATIONS
5763
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5764
#endif
5765
 
5766
  switch (TREE_CODE (type))
5767
    {
5768
    case RECORD_TYPE:
5769
    case UNION_TYPE:
5770
    case QUAL_UNION_TYPE:
5771
      {
5772
        unsigned HOST_WIDE_INT idx;
5773
        tree field, value;
5774
 
5775
        /* If size is zero or the target is already cleared, do nothing.  */
5776
        if (size == 0 || cleared)
5777
          cleared = 1;
5778
        /* We either clear the aggregate or indicate the value is dead.  */
5779
        else if ((TREE_CODE (type) == UNION_TYPE
5780
                  || TREE_CODE (type) == QUAL_UNION_TYPE)
5781
                 && ! CONSTRUCTOR_ELTS (exp))
5782
          /* If the constructor is empty, clear the union.  */
5783
          {
5784
            clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5785
            cleared = 1;
5786
          }
5787
 
5788
        /* If we are building a static constructor into a register,
5789
           set the initial value as zero so we can fold the value into
5790
           a constant.  But if more than one register is involved,
5791
           this probably loses.  */
5792
        else if (REG_P (target) && TREE_STATIC (exp)
5793
                 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5794
          {
5795
            emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5796
            cleared = 1;
5797
          }
5798
 
5799
        /* If the constructor has fewer fields than the structure or
5800
           if we are initializing the structure to mostly zeros, clear
5801
           the whole structure first.  Don't do this if TARGET is a
5802
           register whose mode size isn't equal to SIZE since
5803
           clear_storage can't handle this case.  */
5804
        else if (size > 0
5805
                 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5806
                      != fields_length (type))
5807
                     || mostly_zeros_p (exp))
5808
                 && (!REG_P (target)
5809
                     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5810
                         == size)))
5811
          {
5812
            clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5813
            cleared = 1;
5814
          }
5815
 
5816
        if (REG_P (target) && !cleared)
5817
          emit_clobber (target);
5818
 
5819
        /* Store each element of the constructor into the
5820
           corresponding field of TARGET.  */
5821
        FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5822
          {
5823
            enum machine_mode mode;
5824
            HOST_WIDE_INT bitsize;
5825
            HOST_WIDE_INT bitpos = 0;
5826
            tree offset;
5827
            rtx to_rtx = target;
5828
 
5829
            /* Just ignore missing fields.  We cleared the whole
5830
               structure, above, if any fields are missing.  */
5831
            if (field == 0)
5832
              continue;
5833
 
5834
            if (cleared && initializer_zerop (value))
5835
              continue;
5836
 
5837
            if (host_integerp (DECL_SIZE (field), 1))
5838
              bitsize = tree_low_cst (DECL_SIZE (field), 1);
5839
            else
5840
              bitsize = -1;
5841
 
5842
            mode = DECL_MODE (field);
5843
            if (DECL_BIT_FIELD (field))
5844
              mode = VOIDmode;
5845
 
5846
            offset = DECL_FIELD_OFFSET (field);
5847
            if (host_integerp (offset, 0)
5848
                && host_integerp (bit_position (field), 0))
5849
              {
5850
                bitpos = int_bit_position (field);
5851
                offset = 0;
5852
              }
5853
            else
5854
              bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5855
 
5856
            if (offset)
5857
              {
5858
                enum machine_mode address_mode;
5859
                rtx offset_rtx;
5860
 
5861
                offset
5862
                  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5863
                                                    make_tree (TREE_TYPE (exp),
5864
                                                               target));
5865
 
5866
                offset_rtx = expand_normal (offset);
5867
                gcc_assert (MEM_P (to_rtx));
5868
 
5869
                address_mode
5870
                  = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5871
                if (GET_MODE (offset_rtx) != address_mode)
5872
                  offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5873
 
5874
                to_rtx = offset_address (to_rtx, offset_rtx,
5875
                                         highest_pow2_factor (offset));
5876
              }
5877
 
5878
#ifdef WORD_REGISTER_OPERATIONS
5879
            /* If this initializes a field that is smaller than a
5880
               word, at the start of a word, try to widen it to a full
5881
               word.  This special case allows us to output C++ member
5882
               function initializations in a form that the optimizers
5883
               can understand.  */
5884
            if (REG_P (target)
5885
                && bitsize < BITS_PER_WORD
5886
                && bitpos % BITS_PER_WORD == 0
5887
                && GET_MODE_CLASS (mode) == MODE_INT
5888
                && TREE_CODE (value) == INTEGER_CST
5889
                && exp_size >= 0
5890
                && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5891
              {
5892
                tree type = TREE_TYPE (value);
5893
 
5894
                if (TYPE_PRECISION (type) < BITS_PER_WORD)
5895
                  {
5896
                    type = lang_hooks.types.type_for_size
5897
                      (BITS_PER_WORD, TYPE_UNSIGNED (type));
5898
                    value = fold_convert (type, value);
5899
                  }
5900
 
5901
                if (BYTES_BIG_ENDIAN)
5902
                  value
5903
                   = fold_build2 (LSHIFT_EXPR, type, value,
5904
                                   build_int_cst (type,
5905
                                                  BITS_PER_WORD - bitsize));
5906
                bitsize = BITS_PER_WORD;
5907
                mode = word_mode;
5908
              }
5909
#endif
5910
 
5911
            if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5912
                && DECL_NONADDRESSABLE_P (field))
5913
              {
5914
                to_rtx = copy_rtx (to_rtx);
5915
                MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5916
              }
5917
 
5918
            store_constructor_field (to_rtx, bitsize, bitpos, mode,
5919
                                     value, type, cleared,
5920
                                     get_alias_set (TREE_TYPE (field)));
5921
          }
5922
        break;
5923
      }
5924
    case ARRAY_TYPE:
5925
      {
5926
        tree value, index;
5927
        unsigned HOST_WIDE_INT i;
5928
        int need_to_clear;
5929
        tree domain;
5930
        tree elttype = TREE_TYPE (type);
5931
        int const_bounds_p;
5932
        HOST_WIDE_INT minelt = 0;
5933
        HOST_WIDE_INT maxelt = 0;
5934
 
5935
        domain = TYPE_DOMAIN (type);
5936
        const_bounds_p = (TYPE_MIN_VALUE (domain)
5937
                          && TYPE_MAX_VALUE (domain)
5938
                          && host_integerp (TYPE_MIN_VALUE (domain), 0)
5939
                          && host_integerp (TYPE_MAX_VALUE (domain), 0));
5940
 
5941
        /* If we have constant bounds for the range of the type, get them.  */
5942
        if (const_bounds_p)
5943
          {
5944
            minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5945
            maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5946
          }
5947
 
5948
        /* If the constructor has fewer elements than the array, clear
           the whole array first.  Similarly if this is a static
           constructor of a non-BLKmode object.  */
5951
        if (cleared)
5952
          need_to_clear = 0;
5953
        else if (REG_P (target) && TREE_STATIC (exp))
5954
          need_to_clear = 1;
5955
        else
5956
          {
5957
            unsigned HOST_WIDE_INT idx;
5958
            tree index, value;
5959
            HOST_WIDE_INT count = 0, zero_count = 0;
5960
            need_to_clear = ! const_bounds_p;
5961
 
5962
            /* This loop is a more accurate version of the loop in
5963
               mostly_zeros_p (it handles RANGE_EXPR in an index).  It
5964
               is also needed to check for missing elements.  */
5965
            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5966
              {
5967
                HOST_WIDE_INT this_node_count;
5968
 
5969
                if (need_to_clear)
5970
                  break;
5971
 
5972
                if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5973
                  {
5974
                    tree lo_index = TREE_OPERAND (index, 0);
5975
                    tree hi_index = TREE_OPERAND (index, 1);
5976
 
5977
                    if (! host_integerp (lo_index, 1)
5978
                        || ! host_integerp (hi_index, 1))
5979
                      {
5980
                        need_to_clear = 1;
5981
                        break;
5982
                      }
5983
 
5984
                    this_node_count = (tree_low_cst (hi_index, 1)
5985
                                       - tree_low_cst (lo_index, 1) + 1);
5986
                  }
5987
                else
5988
                  this_node_count = 1;
5989
 
5990
                count += this_node_count;
5991
                if (mostly_zeros_p (value))
5992
                  zero_count += this_node_count;
5993
              }
5994
 
5995
            /* Clear the entire array first if there are any missing
5996
               elements, or if the incidence of zero elements is >=
5997
               75%.  */
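            /* For example, with 100 elements of which 80 are zero,
               4 * 80 >= 3 * 100 holds, so the whole array is cleared and
               only the 20 nonzero elements are stored individually.  */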
5998
            if (! need_to_clear
5999
                && (count < maxelt - minelt + 1
6000
                    || 4 * zero_count >= 3 * count))
6001
              need_to_clear = 1;
6002
          }
6003
 
6004
        if (need_to_clear && size > 0)
6005
          {
6006
            if (REG_P (target))
6007
              emit_move_insn (target,  CONST0_RTX (GET_MODE (target)));
6008
            else
6009
              clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6010
            cleared = 1;
6011
          }
6012
 
6013
        if (!cleared && REG_P (target))
6014
          /* Inform later passes that the old value is dead.  */
6015
          emit_clobber (target);
6016
 
6017
        /* Store each element of the constructor into the
6018
           corresponding element of TARGET, determined by counting the
6019
           elements.  */
6020
        FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6021
          {
6022
            enum machine_mode mode;
6023
            HOST_WIDE_INT bitsize;
6024
            HOST_WIDE_INT bitpos;
6025
            rtx xtarget = target;
6026
 
6027
            if (cleared && initializer_zerop (value))
6028
              continue;
6029
 
6030
            mode = TYPE_MODE (elttype);
6031
            if (mode == BLKmode)
6032
              bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
6033
                         ? tree_low_cst (TYPE_SIZE (elttype), 1)
6034
                         : -1);
6035
            else
6036
              bitsize = GET_MODE_BITSIZE (mode);
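            /* A GNU C designated range such as "[0 ... 3] = v" shows up
               here as a RANGE_EXPR index: small constant ranges are
               unrolled below, anything else is expanded as a runtime
               loop over the range.  */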
6037
 
6038
            if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6039
              {
6040
                tree lo_index = TREE_OPERAND (index, 0);
6041
                tree hi_index = TREE_OPERAND (index, 1);
6042
                rtx index_r, pos_rtx;
6043
                HOST_WIDE_INT lo, hi, count;
6044
                tree position;
6045
 
6046
                /* If the range is constant and "small", unroll the loop.  */
6047
                if (const_bounds_p
6048
                    && host_integerp (lo_index, 0)
6049
                    && host_integerp (hi_index, 0)
6050
                    && (lo = tree_low_cst (lo_index, 0),
6051
                        hi = tree_low_cst (hi_index, 0),
6052
                        count = hi - lo + 1,
6053
                        (!MEM_P (target)
6054
                         || count <= 2
6055
                         || (host_integerp (TYPE_SIZE (elttype), 1)
6056
                             && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
6057
                                 <= 40 * 8)))))
6058
                  {
6059
                    lo -= minelt;  hi -= minelt;
6060
                    for (; lo <= hi; lo++)
6061
                      {
6062
                        bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
6063
 
6064
                        if (MEM_P (target)
6065
                            && !MEM_KEEP_ALIAS_SET_P (target)
6066
                            && TREE_CODE (type) == ARRAY_TYPE
6067
                            && TYPE_NONALIASED_COMPONENT (type))
6068
                          {
6069
                            target = copy_rtx (target);
6070
                            MEM_KEEP_ALIAS_SET_P (target) = 1;
6071
                          }
6072
 
6073
                        store_constructor_field
6074
                          (target, bitsize, bitpos, mode, value, type, cleared,
6075
                           get_alias_set (elttype));
6076
                      }
6077
                  }
6078
                else
6079
                  {
6080
                    rtx loop_start = gen_label_rtx ();
6081
                    rtx loop_end = gen_label_rtx ();
6082
                    tree exit_cond;
6083
 
6084
                    expand_normal (hi_index);
6085
 
6086
                    index = build_decl (EXPR_LOCATION (exp),
6087
                                        VAR_DECL, NULL_TREE, domain);
6088
                    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6089
                    SET_DECL_RTL (index, index_r);
6090
                    store_expr (lo_index, index_r, 0, false);
6091
 
6092
                    /* Build the head of the loop.  */
6093
                    do_pending_stack_adjust ();
6094
                    emit_label (loop_start);
6095
 
6096
                    /* Assign value to element index.  */
6097
                    position =
6098
                      fold_convert (ssizetype,
6099
                                    fold_build2 (MINUS_EXPR,
6100
                                                 TREE_TYPE (index),
6101
                                                 index,
6102
                                                 TYPE_MIN_VALUE (domain)));
6103
 
6104
                    position =
6105
                        size_binop (MULT_EXPR, position,
6106
                                    fold_convert (ssizetype,
6107
                                                  TYPE_SIZE_UNIT (elttype)));
6108
 
6109
                    pos_rtx = expand_normal (position);
6110
                    xtarget = offset_address (target, pos_rtx,
6111
                                              highest_pow2_factor (position));
6112
                    xtarget = adjust_address (xtarget, mode, 0);
6113
                    if (TREE_CODE (value) == CONSTRUCTOR)
6114
                      store_constructor (value, xtarget, cleared,
6115
                                         bitsize / BITS_PER_UNIT);
6116
                    else
6117
                      store_expr (value, xtarget, 0, false);
6118
 
6119
                    /* Generate a conditional jump to exit the loop.  */
6120
                    exit_cond = build2 (LT_EXPR, integer_type_node,
6121
                                        index, hi_index);
6122
                    jumpif (exit_cond, loop_end, -1);
6123
 
6124
                    /* Update the loop counter, and jump to the head of
6125
                       the loop.  */
6126
                    expand_assignment (index,
6127
                                       build2 (PLUS_EXPR, TREE_TYPE (index),
6128
                                               index, integer_one_node),
6129
                                       false);
6130
 
6131
                    emit_jump (loop_start);
6132
 
6133
                    /* Build the end of the loop.  */
6134
                    emit_label (loop_end);
6135
                  }
6136
              }
6137
            else if ((index != 0 && ! host_integerp (index, 0))
6138
                     || ! host_integerp (TYPE_SIZE (elttype), 1))
6139
              {
6140
                tree position;
6141
 
6142
                if (index == 0)
6143
                  index = ssize_int (1);
6144
 
6145
                if (minelt)
6146
                  index = fold_convert (ssizetype,
6147
                                        fold_build2 (MINUS_EXPR,
6148
                                                     TREE_TYPE (index),
6149
                                                     index,
6150
                                                     TYPE_MIN_VALUE (domain)));
6151
 
6152
                position =
6153
                  size_binop (MULT_EXPR, index,
6154
                              fold_convert (ssizetype,
6155
                                            TYPE_SIZE_UNIT (elttype)));
6156
                xtarget = offset_address (target,
6157
                                          expand_normal (position),
6158
                                          highest_pow2_factor (position));
6159
                xtarget = adjust_address (xtarget, mode, 0);
6160
                store_expr (value, xtarget, 0, false);
6161
              }
6162
            else
6163
              {
6164
                if (index != 0)
6165
                  bitpos = ((tree_low_cst (index, 0) - minelt)
6166
                            * tree_low_cst (TYPE_SIZE (elttype), 1));
6167
                else
6168
                  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
6169
 
6170
                if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6171
                    && TREE_CODE (type) == ARRAY_TYPE
6172
                    && TYPE_NONALIASED_COMPONENT (type))
6173
                  {
6174
                    target = copy_rtx (target);
6175
                    MEM_KEEP_ALIAS_SET_P (target) = 1;
6176
                  }
6177
                store_constructor_field (target, bitsize, bitpos, mode, value,
6178
                                         type, cleared, get_alias_set (elttype));
6179
              }
6180
          }
6181
        break;
6182
      }
6183
 
6184
    case VECTOR_TYPE:
6185
      {
6186
        unsigned HOST_WIDE_INT idx;
6187
        constructor_elt *ce;
6188
        int i;
6189
        int need_to_clear;
6190
        int icode = 0;
6191
        tree elttype = TREE_TYPE (type);
6192
        int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
6193
        enum machine_mode eltmode = TYPE_MODE (elttype);
6194
        HOST_WIDE_INT bitsize;
6195
        HOST_WIDE_INT bitpos;
6196
        rtvec vector = NULL;
6197
        unsigned n_elts;
6198
        alias_set_type alias;
6199
 
6200
        gcc_assert (eltmode != BLKmode);
6201
 
6202
        n_elts = TYPE_VECTOR_SUBPARTS (type);
6203
        if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6204
          {
6205
            enum machine_mode mode = GET_MODE (target);
6206
 
6207
            icode = (int) optab_handler (vec_init_optab, mode);
6208
            if (icode != CODE_FOR_nothing)
6209
              {
6210
                unsigned int i;
6211
 
6212
                vector = rtvec_alloc (n_elts);
6213
                for (i = 0; i < n_elts; i++)
6214
                  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6215
              }
6216
          }
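        /* When VECTOR is non-null (the target is a vector register and the
           backend provides a vec_init pattern for its mode), the RTVEC
           allocated above is filled in by the loop below and emitted as a
           single vec_init insn at the end of this case; otherwise each
           element goes through store_constructor_field individually.  */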
6217
 
6218
        /* If the constructor has fewer elements than the vector,
           clear the whole vector first.  Similarly if this is a static
           constructor of a non-BLKmode object.  */
6221
        if (cleared)
6222
          need_to_clear = 0;
6223
        else if (REG_P (target) && TREE_STATIC (exp))
6224
          need_to_clear = 1;
6225
        else
6226
          {
6227
            unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6228
            tree value;
6229
 
6230
            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6231
              {
6232
                int n_elts_here = tree_low_cst
6233
                  (int_const_binop (TRUNC_DIV_EXPR,
6234
                                    TYPE_SIZE (TREE_TYPE (value)),
6235
                                    TYPE_SIZE (elttype)), 1);
6236
 
6237
                count += n_elts_here;
6238
                if (mostly_zeros_p (value))
6239
                  zero_count += n_elts_here;
6240
              }
6241
 
6242
            /* Clear the entire vector first if there are any missing elements,
6243
               or if the incidence of zero elements is >= 75%.  */
6244
            need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6245
          }
6246
 
6247
        if (need_to_clear && size > 0 && !vector)
6248
          {
6249
            if (REG_P (target))
6250
              emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6251
            else
6252
              clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6253
            cleared = 1;
6254
          }
6255
 
6256
        /* Inform later passes that the old value is dead.  */
6257
        if (!cleared && !vector && REG_P (target))
6258
          emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6259
 
6260
        if (MEM_P (target))
6261
          alias = MEM_ALIAS_SET (target);
6262
        else
6263
          alias = get_alias_set (elttype);
6264
 
6265
        /* Store each element of the constructor into the corresponding
6266
           element of TARGET, determined by counting the elements.  */
6267
        for (idx = 0, i = 0;
6268
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6269
             idx++, i += bitsize / elt_size)
6270
          {
6271
            HOST_WIDE_INT eltpos;
6272
            tree value = ce->value;
6273
 
6274
            bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
6275
            if (cleared && initializer_zerop (value))
6276
              continue;
6277
 
6278
            if (ce->index)
6279
              eltpos = tree_low_cst (ce->index, 1);
6280
            else
6281
              eltpos = i;
6282
 
6283
            if (vector)
6284
              {
6285
                /* Vector CONSTRUCTORs should only be built from smaller
6286
                   vectors in the case of BLKmode vectors.  */
6287
                gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6288
                RTVEC_ELT (vector, eltpos)
6289
                  = expand_normal (value);
6290
              }
6291
            else
6292
              {
6293
                enum machine_mode value_mode =
6294
                  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6295
                  ? TYPE_MODE (TREE_TYPE (value))
6296
                  : eltmode;
6297
                bitpos = eltpos * elt_size;
6298
                store_constructor_field (target, bitsize, bitpos,
6299
                                         value_mode, value, type,
6300
                                         cleared, alias);
6301
              }
6302
          }
6303
 
6304
        if (vector)
6305
          emit_insn (GEN_FCN (icode)
6306
                     (target,
6307
                      gen_rtx_PARALLEL (GET_MODE (target), vector)));
6308
        break;
6309
      }
6310
 
6311
    default:
6312
      gcc_unreachable ();
6313
    }
6314
}
6315
 
6316
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   BITREGION_START is the bit position of the first bitfield in this region.
   BITREGION_END is the bit position of the ending bitfield in this region.
   Both are 0 if the C++ memory model does not apply, or if we are not
   interested in keeping track of bitfield regions.

   Always return const0_rtx unless we have something particular to
   return.

   TYPE is the type of the underlying object.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.  */
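/* As a rough illustration: expanding "x.f = v" where F is a 3-bit bitfield
   typically arrives here with MODE == VOIDmode and BITSIZE == 3 and falls
   through to the store_bit_field path below.  */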
6336
 
6337
static rtx
6338
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6339
             unsigned HOST_WIDE_INT bitregion_start,
6340
             unsigned HOST_WIDE_INT bitregion_end,
6341
             enum machine_mode mode, tree exp, tree type,
6342
             alias_set_type alias_set, bool nontemporal)
6343
{
6344
  if (TREE_CODE (exp) == ERROR_MARK)
6345
    return const0_rtx;
6346
 
6347
  /* If we have nothing to store, do nothing unless the expression has
6348
     side-effects.  */
6349
  if (bitsize == 0)
6350
    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6351
 
6352
  /* If we are storing into an unaligned field of an aligned union that is
6353
     in a register, we may have the mode of TARGET being an integer mode but
6354
     MODE == BLKmode.  In that case, get an aligned object whose size and
6355
     alignment are the same as TARGET and store TARGET into it (we can avoid
6356
     the store if the field being stored is the entire width of TARGET).  Then
6357
     call ourselves recursively to store the field into a BLKmode version of
6358
     that object.  Finally, load from the object into TARGET.  This is not
6359
     very efficient in general, but should only be slightly more expensive
6360
     than the otherwise-required unaligned accesses.  Perhaps this can be
6361
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
6362
     twice, once with emit_move_insn and once via store_field.  */
6363
 
6364
  if (mode == BLKmode
6365
      && (REG_P (target) || GET_CODE (target) == SUBREG))
6366
    {
6367
      rtx object = assign_temp (type, 0, 1, 1);
6368
      rtx blk_object = adjust_address (object, BLKmode, 0);
6369
 
6370
      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
6371
        emit_move_insn (object, target);
6372
 
6373
      store_field (blk_object, bitsize, bitpos,
6374
                   bitregion_start, bitregion_end,
6375
                   mode, exp, type, MEM_ALIAS_SET (blk_object), nontemporal);
6376
 
6377
      emit_move_insn (target, object);
6378
 
6379
      /* We want to return the BLKmode version of the data.  */
6380
      return blk_object;
6381
    }
6382
 
6383
  if (GET_CODE (target) == CONCAT)
6384
    {
6385
      /* We're storing into a struct containing a single __complex.  */
6386
 
6387
      gcc_assert (!bitpos);
6388
      return store_expr (exp, target, 0, nontemporal);
6389
    }
6390
 
6391
  /* If the structure is in a register or if the component
6392
     is a bit field, we cannot use addressing to access it.
6393
     Use bit-field techniques or SUBREG to store in it.  */
6394
 
6395
  if (mode == VOIDmode
6396
      || (mode != BLKmode && ! direct_store[(int) mode]
6397
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6398
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6399
      || REG_P (target)
6400
      || GET_CODE (target) == SUBREG
6401
      /* If the field isn't aligned enough to store as an ordinary memref,
6402
         store it as a bit field.  */
6403
      || (mode != BLKmode
6404
          && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6405
                || bitpos % GET_MODE_ALIGNMENT (mode))
6406
               && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6407
              || (bitpos % BITS_PER_UNIT != 0)))
6408
      || (bitsize >= 0 && mode != BLKmode
6409
          && GET_MODE_BITSIZE (mode) > bitsize)
6410
      /* If the RHS and field are a constant size and the size of the
6411
         RHS isn't the same size as the bitfield, we must use bitfield
6412
         operations.  */
6413
      || (bitsize >= 0
6414
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6415
          && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6416
      /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6417
         decl we must use bitfield operations.  */
6418
      || (bitsize >= 0
6419
          && TREE_CODE (exp) == MEM_REF
6420
          && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6421
          && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6422
          && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6423
          && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6424
    {
6425
      rtx temp;
6426
      gimple nop_def;
6427
 
6428
      /* If EXP is a NOP_EXPR of precision less than its mode, then that
6429
         implies a mask operation.  If the precision is the same size as
6430
         the field we're storing into, that mask is redundant.  This is
6431
         particularly common with bit field assignments generated by the
6432
         C front end.  */
6433
      nop_def = get_def_for_expr (exp, NOP_EXPR);
6434
      if (nop_def)
6435
        {
6436
          tree type = TREE_TYPE (exp);
6437
          if (INTEGRAL_TYPE_P (type)
6438
              && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6439
              && bitsize == TYPE_PRECISION (type))
6440
            {
6441
              tree op = gimple_assign_rhs1 (nop_def);
6442
              type = TREE_TYPE (op);
6443
              if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6444
                exp = op;
6445
            }
6446
        }
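      /* For example, when a 5-bit field is assigned from an int, the front
         end narrows the value to a 5-bit type; since the store below only
         writes BITSIZE bits anyway, that conversion is dropped and the
         original wider value is expanded instead.  */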
6447
 
6448
      temp = expand_normal (exp);
6449
 
6450
      /* If BITSIZE is narrower than the size of the type of EXP
6451
         we will be narrowing TEMP.  Normally, what's wanted are the
6452
         low-order bits.  However, if EXP's type is a record and this is
6453
         big-endian machine, we want the upper BITSIZE bits.  */
6454
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6455
          && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6456
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6457
        temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6458
                             GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6459
                             NULL_RTX, 1);
6460
 
6461
      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
6462
         MODE.  */
6463
      if (mode != VOIDmode && mode != BLKmode
6464
          && mode != TYPE_MODE (TREE_TYPE (exp)))
6465
        temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6466
 
6467
      /* If the modes of TEMP and TARGET are both BLKmode, both
6468
         must be in memory and BITPOS must be aligned on a byte
6469
         boundary.  If so, we simply do a block copy.  Likewise
6470
         for a BLKmode-like TARGET.  */
6471
      if (GET_MODE (temp) == BLKmode
6472
          && (GET_MODE (target) == BLKmode
6473
              || (MEM_P (target)
6474
                  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6475
                  && (bitpos % BITS_PER_UNIT) == 0
6476
                  && (bitsize % BITS_PER_UNIT) == 0)))
6477
        {
6478
          gcc_assert (MEM_P (target) && MEM_P (temp)
6479
                      && (bitpos % BITS_PER_UNIT) == 0);
6480
 
6481
          target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6482
          emit_block_move (target, temp,
6483
                           GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6484
                                    / BITS_PER_UNIT),
6485
                           BLOCK_OP_NORMAL);
6486
 
6487
          return const0_rtx;
6488
        }
6489
 
6490
      /* Store the value in the bitfield.  */
6491
      store_bit_field (target, bitsize, bitpos,
6492
                       bitregion_start, bitregion_end,
6493
                       mode, temp);
6494
 
6495
      return const0_rtx;
6496
    }
6497
  else
6498
    {
6499
      /* Now build a reference to just the desired component.  */
6500
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6501
 
6502
      if (to_rtx == target)
6503
        to_rtx = copy_rtx (to_rtx);
6504
 
6505
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6506
        set_mem_alias_set (to_rtx, alias_set);
6507
 
6508
      return store_expr (exp, to_rtx, 0, nontemporal);
6509
    }
6510
}
6511
 
6512
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
   Otherwise, it is a mode that can be used to access the field.

   If the field describes a variable-sized object, *PMODE is set to
   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.

   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
   look through nodes that serve as markers of a greater alignment than
   the one that can be deduced from the expression.  These nodes make it
   possible for front-ends to prevent temporaries from being created by
   the middle-end on alignment considerations.  For that purpose, the
   normal operating mode at high-level is to always pass FALSE so that
   the ultimate containing object is really returned; moreover, the
   associated predicate handled_component_p will always return TRUE
   on these nodes, thus indicating that they are essentially handled
   by get_inner_reference.  TRUE should only be passed when the caller
   is scanning the expression in order to build another representation
   and specifically knows how to handle these nodes; as such, this is
   the normal operating mode in the RTL expanders.  */
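/* As a rough illustration: for "s.f" where F is an ordinary int field at
   constant byte offset 4, this returns the expression for S with
   *PBITSIZE == 32, *PBITPOS == 32, *POFFSET == 0 and *PMODE == SImode
   (assuming a 32-bit int); a variable offset, e.g. from an array index,
   would come back as a tree in *POFFSET instead.  */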
6546
 
6547
tree
6548
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6549
                     HOST_WIDE_INT *pbitpos, tree *poffset,
6550
                     enum machine_mode *pmode, int *punsignedp,
6551
                     int *pvolatilep, bool keep_aligning)
6552
{
6553
  tree size_tree = 0;
6554
  enum machine_mode mode = VOIDmode;
6555
  bool blkmode_bitfield = false;
6556
  tree offset = size_zero_node;
6557
  double_int bit_offset = double_int_zero;
6558
 
6559
  /* First get the mode, signedness, and size.  We do this from just the
6560
     outermost expression.  */
6561
  *pbitsize = -1;
6562
  if (TREE_CODE (exp) == COMPONENT_REF)
6563
    {
6564
      tree field = TREE_OPERAND (exp, 1);
6565
      size_tree = DECL_SIZE (field);
6566
      if (!DECL_BIT_FIELD (field))
6567
        mode = DECL_MODE (field);
6568
      else if (DECL_MODE (field) == BLKmode)
6569
        blkmode_bitfield = true;
6570
      else if (TREE_THIS_VOLATILE (exp)
6571
               && flag_strict_volatile_bitfields > 0)
6572
        /* Volatile bitfields should be accessed in the mode of the
6573
             field's type, not the mode computed based on the bit
6574
             size.  */
6575
        mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6576
 
6577
      *punsignedp = DECL_UNSIGNED (field);
6578
    }
6579
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
6580
    {
6581
      size_tree = TREE_OPERAND (exp, 1);
6582
      *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6583
                     || TYPE_UNSIGNED (TREE_TYPE (exp)));
6584
 
6585
      /* For vector types, with the correct size of access, use the mode of
6586
         inner type.  */
6587
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6588
          && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6589
          && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6590
        mode = TYPE_MODE (TREE_TYPE (exp));
6591
    }
6592
  else
6593
    {
6594
      mode = TYPE_MODE (TREE_TYPE (exp));
6595
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6596
 
6597
      if (mode == BLKmode)
6598
        size_tree = TYPE_SIZE (TREE_TYPE (exp));
6599
      else
6600
        *pbitsize = GET_MODE_BITSIZE (mode);
6601
    }
6602
 
6603
  if (size_tree != 0)
6604
    {
6605
      if (! host_integerp (size_tree, 1))
6606
        mode = BLKmode, *pbitsize = -1;
6607
      else
6608
        *pbitsize = tree_low_cst (size_tree, 1);
6609
    }
6610
 
6611
  /* Compute cumulative bit-offset for nested component-refs and array-refs,
6612
     and find the ultimate containing object.  */
6613
  while (1)
6614
    {
6615
      switch (TREE_CODE (exp))
6616
        {
6617
        case BIT_FIELD_REF:
6618
          bit_offset
6619
            = double_int_add (bit_offset,
6620
                              tree_to_double_int (TREE_OPERAND (exp, 2)));
6621
          break;
6622
 
6623
        case COMPONENT_REF:
6624
          {
6625
            tree field = TREE_OPERAND (exp, 1);
6626
            tree this_offset = component_ref_field_offset (exp);
6627
 
6628
            /* If this field hasn't been filled in yet, don't go past it.
6629
               This should only happen when folding expressions made during
6630
               type construction.  */
6631
            if (this_offset == 0)
6632
              break;
6633
 
6634
            offset = size_binop (PLUS_EXPR, offset, this_offset);
6635
            bit_offset = double_int_add (bit_offset,
6636
                                         tree_to_double_int
6637
                                           (DECL_FIELD_BIT_OFFSET (field)));
6638
 
6639
            /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
6640
          }
6641
          break;
6642
 
6643
        case ARRAY_REF:
6644
        case ARRAY_RANGE_REF:
6645
          {
6646
            tree index = TREE_OPERAND (exp, 1);
6647
            tree low_bound = array_ref_low_bound (exp);
6648
            tree unit_size = array_ref_element_size (exp);
6649
 
6650
            /* We assume all arrays have sizes that are a multiple of a byte.
6651
               First subtract the lower bound, if any, in the type of the
6652
               index, then convert to sizetype and multiply by the size of
6653
               the array element.  */
6654
            if (! integer_zerop (low_bound))
6655
              index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6656
                                   index, low_bound);
6657
 
6658
            offset = size_binop (PLUS_EXPR, offset,
6659
                                 size_binop (MULT_EXPR,
6660
                                             fold_convert (sizetype, index),
6661
                                             unit_size));
6662
          }
6663
          break;
6664
 
6665
        case REALPART_EXPR:
6666
          break;
6667
 
6668
        case IMAGPART_EXPR:
6669
          bit_offset = double_int_add (bit_offset,
6670
                                       uhwi_to_double_int (*pbitsize));
6671
          break;
6672
 
6673
        case VIEW_CONVERT_EXPR:
6674
          if (keep_aligning && STRICT_ALIGNMENT
6675
              && (TYPE_ALIGN (TREE_TYPE (exp))
6676
               > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6677
              && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6678
                  < BIGGEST_ALIGNMENT)
6679
              && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6680
                  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6681
            goto done;
6682
          break;
6683
 
6684
        case MEM_REF:
6685
          /* Hand back the decl for MEM[&decl, off].  */
6686
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6687
            {
6688
              tree off = TREE_OPERAND (exp, 1);
6689
              if (!integer_zerop (off))
6690
                {
6691
                  double_int boff, coff = mem_ref_offset (exp);
6692
                  boff = double_int_lshift (coff,
6693
                                            BITS_PER_UNIT == 8
6694
                                            ? 3 : exact_log2 (BITS_PER_UNIT),
6695
                                            HOST_BITS_PER_DOUBLE_INT, true);
6696
                  bit_offset = double_int_add (bit_offset, boff);
6697
                }
6698
              exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6699
            }
6700
          goto done;
6701
 
6702
        default:
6703
          goto done;
6704
        }
6705
 
6706
      /* If any reference in the chain is volatile, the effect is volatile.  */
6707
      if (TREE_THIS_VOLATILE (exp))
6708
        *pvolatilep = 1;
6709
 
6710
      exp = TREE_OPERAND (exp, 0);
6711
    }
6712
 done:
6713
 
6714
  /* If OFFSET is constant, see if we can return the whole thing as a
6715
     constant bit position.  Make sure to handle overflow during
6716
     this conversion.  */
6717
  if (TREE_CODE (offset) == INTEGER_CST)
6718
    {
6719
      double_int tem = tree_to_double_int (offset);
6720
      tem = double_int_sext (tem, TYPE_PRECISION (sizetype));
6721
      tem = double_int_lshift (tem,
6722
                               BITS_PER_UNIT == 8
6723
                               ? 3 : exact_log2 (BITS_PER_UNIT),
6724
                               HOST_BITS_PER_DOUBLE_INT, true);
6725
      tem = double_int_add (tem, bit_offset);
6726
      if (double_int_fits_in_shwi_p (tem))
6727
        {
6728
          *pbitpos = double_int_to_shwi (tem);
6729
          *poffset = offset = NULL_TREE;
6730
        }
6731
    }
6732
 
6733
  /* Otherwise, split it up.  */
6734
  if (offset)
6735
    {
6736
      /* Avoid returning a negative bitpos as this may wreak havoc later.  */
6737
      if (double_int_negative_p (bit_offset))
6738
        {
6739
          double_int mask
6740
            = double_int_mask (BITS_PER_UNIT == 8
6741
                               ? 3 : exact_log2 (BITS_PER_UNIT));
6742
          double_int tem = double_int_and_not (bit_offset, mask);
6743
          /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
             Subtract it from BIT_OFFSET and add it (scaled) to OFFSET.  */
6745
          bit_offset = double_int_sub (bit_offset, tem);
6746
          tem = double_int_rshift (tem,
6747
                                   BITS_PER_UNIT == 8
6748
                                   ? 3 : exact_log2 (BITS_PER_UNIT),
6749
                                   HOST_BITS_PER_DOUBLE_INT, true);
6750
          offset = size_binop (PLUS_EXPR, offset,
6751
                               double_int_to_tree (sizetype, tem));
6752
        }
6753
 
6754
      *pbitpos = double_int_to_shwi (bit_offset);
6755
      *poffset = offset;
6756
    }
6757
 
6758
  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
6759
  if (mode == VOIDmode
6760
      && blkmode_bitfield
6761
      && (*pbitpos % BITS_PER_UNIT) == 0
6762
      && (*pbitsize % BITS_PER_UNIT) == 0)
6763
    *pmode = BLKmode;
6764
  else
6765
    *pmode = mode;
6766
 
6767
  return exp;
6768
}
6769
 
6770
/* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
   ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
   EXP are marked as PACKED.  */
6773
 
6774
bool
6775
contains_packed_reference (const_tree exp)
6776
{
6777
  bool packed_p = false;
6778
 
6779
  while (1)
6780
    {
6781
      switch (TREE_CODE (exp))
6782
        {
6783
        case COMPONENT_REF:
6784
          {
6785
            tree field = TREE_OPERAND (exp, 1);
6786
            packed_p = DECL_PACKED (field)
6787
                       || TYPE_PACKED (TREE_TYPE (field))
6788
                       || TYPE_PACKED (TREE_TYPE (exp));
6789
            if (packed_p)
6790
              goto done;
6791
          }
6792
          break;
6793
 
6794
        case BIT_FIELD_REF:
6795
        case ARRAY_REF:
6796
        case ARRAY_RANGE_REF:
6797
        case REALPART_EXPR:
6798
        case IMAGPART_EXPR:
6799
        case VIEW_CONVERT_EXPR:
6800
          break;
6801
 
6802
        default:
6803
          goto done;
6804
        }
6805
      exp = TREE_OPERAND (exp, 0);
6806
    }
6807
 done:
6808
  return packed_p;
6809
}
6810
 
6811
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
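/* For example, for "a[i]" with "double a[N]" this is normally just the
   TYPE_SIZE_UNIT of the element type, i.e. 8 bytes assuming an 8-byte
   double, unless the ARRAY_REF carries an explicit aligned size in
   operand 3.  */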
6813
 
6814
tree
6815
array_ref_element_size (tree exp)
6816
{
6817
  tree aligned_size = TREE_OPERAND (exp, 3);
6818
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6819
  location_t loc = EXPR_LOCATION (exp);
6820
 
6821
  /* If a size was specified in the ARRAY_REF, it's the size measured
6822
     in alignment units of the element type.  So multiply by that value.  */
6823
  if (aligned_size)
6824
    {
6825
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6826
         sizetype from another type of the same width and signedness.  */
6827
      if (TREE_TYPE (aligned_size) != sizetype)
6828
        aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6829
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
6830
                             size_int (TYPE_ALIGN_UNIT (elmt_type)));
6831
    }
6832
 
6833
  /* Otherwise, take the size from that of the element type.  Substitute
6834
     any PLACEHOLDER_EXPR that we have.  */
6835
  else
6836
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6837
}
6838
 
6839
/* Return a tree representing the lower bound of the array mentioned in
6840
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
6841
 
6842
tree
6843
array_ref_low_bound (tree exp)
6844
{
6845
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6846
 
6847
  /* If a lower bound is specified in EXP, use it.  */
6848
  if (TREE_OPERAND (exp, 2))
6849
    return TREE_OPERAND (exp, 2);
6850
 
6851
  /* Otherwise, if there is a domain type and it has a lower bound, use it,
6852
     substituting for a PLACEHOLDER_EXPR as needed.  */
6853
  if (domain_type && TYPE_MIN_VALUE (domain_type))
6854
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6855
 
6856
  /* Otherwise, return a zero of the appropriate type.  */
6857
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6858
}
6859
 
6860
/* Return a tree representing the upper bound of the array mentioned in
6861
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
6862
 
6863
tree
6864
array_ref_up_bound (tree exp)
6865
{
6866
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6867
 
6868
  /* If there is a domain type and it has an upper bound, use it, substituting
6869
     for a PLACEHOLDER_EXPR as needed.  */
6870
  if (domain_type && TYPE_MAX_VALUE (domain_type))
6871
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6872
 
6873
  /* Otherwise fail.  */
6874
  return NULL_TREE;
6875
}
6876
 
6877
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */
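/* The full bit position of a field is this byte offset scaled by
   BITS_PER_UNIT plus DECL_FIELD_BIT_OFFSET; get_inner_reference above
   combines the two in that way when the offset is constant.  */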
6879
 
6880
tree
6881
component_ref_field_offset (tree exp)
6882
{
6883
  tree aligned_offset = TREE_OPERAND (exp, 2);
6884
  tree field = TREE_OPERAND (exp, 1);
6885
  location_t loc = EXPR_LOCATION (exp);
6886
 
6887
  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6888
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
6889
     value.  */
6890
  if (aligned_offset)
6891
    {
6892
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6893
         sizetype from another type of the same width and signedness.  */
6894
      if (TREE_TYPE (aligned_offset) != sizetype)
6895
        aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6896
      return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6897
                             size_int (DECL_OFFSET_ALIGN (field)
6898
                                       / BITS_PER_UNIT));
6899
    }
6900
 
6901
  /* Otherwise, take the offset from that of the field.  Substitute
6902
     any PLACEHOLDER_EXPR that we have.  */
6903
  else
6904
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6905
}
6906
 
6907
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */
6908
 
6909
static unsigned HOST_WIDE_INT
6910
target_align (const_tree target)
6911
{
6912
  /* We might have a chain of nested references with intermediate misaligning
     bitfield components, so we need to recurse to find out.  */
6914
 
6915
  unsigned HOST_WIDE_INT this_align, outer_align;
6916
 
6917
  switch (TREE_CODE (target))
6918
    {
6919
    case BIT_FIELD_REF:
6920
      return 1;
6921
 
6922
    case COMPONENT_REF:
6923
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6924
      outer_align = target_align (TREE_OPERAND (target, 0));
6925
      return MIN (this_align, outer_align);
6926
 
6927
    case ARRAY_REF:
6928
    case ARRAY_RANGE_REF:
6929
      this_align = TYPE_ALIGN (TREE_TYPE (target));
6930
      outer_align = target_align (TREE_OPERAND (target, 0));
6931
      return MIN (this_align, outer_align);
6932
 
6933
    CASE_CONVERT:
6934
    case NON_LVALUE_EXPR:
6935
    case VIEW_CONVERT_EXPR:
6936
      this_align = TYPE_ALIGN (TREE_TYPE (target));
6937
      outer_align = target_align (TREE_OPERAND (target, 0));
6938
      return MAX (this_align, outer_align);
6939
 
6940
    default:
6941
      return TYPE_ALIGN (TREE_TYPE (target));
6942
    }
6943
}
6944
 
6945
 
6946
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
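/* A typical use: given something like (plus (reg X) (const_int 4)), this
   emits an add and returns a register holding the sum, so the caller can
   use the result wherever a general operand is required.  */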
6952
 
6953
rtx
6954
force_operand (rtx value, rtx target)
6955
{
6956
  rtx op1, op2;
6957
  /* Use subtarget as the target for operand 0 of a binary operation.  */
6958
  rtx subtarget = get_subtarget (target);
6959
  enum rtx_code code = GET_CODE (value);
6960
 
6961
  /* Check for subreg applied to an expression produced by loop optimizer.  */
6962
  if (code == SUBREG
6963
      && !REG_P (SUBREG_REG (value))
6964
      && !MEM_P (SUBREG_REG (value)))
6965
    {
6966
      value
6967
        = simplify_gen_subreg (GET_MODE (value),
6968
                               force_reg (GET_MODE (SUBREG_REG (value)),
6969
                                          force_operand (SUBREG_REG (value),
6970
                                                         NULL_RTX)),
6971
                               GET_MODE (SUBREG_REG (value)),
6972
                               SUBREG_BYTE (value));
6973
      code = GET_CODE (value);
6974
    }
6975
 
6976
  /* Check for a PIC address load.  */
6977
  if ((code == PLUS || code == MINUS)
6978
      && XEXP (value, 0) == pic_offset_table_rtx
6979
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6980
          || GET_CODE (XEXP (value, 1)) == LABEL_REF
6981
          || GET_CODE (XEXP (value, 1)) == CONST))
6982
    {
6983
      if (!subtarget)
6984
        subtarget = gen_reg_rtx (GET_MODE (value));
6985
      emit_move_insn (subtarget, value);
6986
      return subtarget;
6987
    }
6988
 
6989
  if (ARITHMETIC_P (value))
6990
    {
6991
      op2 = XEXP (value, 1);
6992
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6993
        subtarget = 0;
6994
      if (code == MINUS && CONST_INT_P (op2))
6995
        {
6996
          code = PLUS;
6997
          op2 = negate_rtx (GET_MODE (value), op2);
6998
        }
6999
 
7000
      /* Check for an addition with OP2 a constant integer and our first
7001
         operand a PLUS of a virtual register and something else.  In that
7002
         case, we want to emit the sum of the virtual register and the
7003
         constant first and then add the other value.  This allows virtual
7004
         register instantiation to simply modify the constant rather than
7005
         creating another one around this addition.  */
7006
      if (code == PLUS && CONST_INT_P (op2)
7007
          && GET_CODE (XEXP (value, 0)) == PLUS
7008
          && REG_P (XEXP (XEXP (value, 0), 0))
7009
          && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7010
          && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7011
        {
7012
          rtx temp = expand_simple_binop (GET_MODE (value), code,
7013
                                          XEXP (XEXP (value, 0), 0), op2,
7014
                                          subtarget, 0, OPTAB_LIB_WIDEN);
7015
          return expand_simple_binop (GET_MODE (value), code, temp,
7016
                                      force_operand (XEXP (XEXP (value,
7017
                                                                 0), 1), 0),
7018
                                      target, 0, OPTAB_LIB_WIDEN);
7019
        }
7020
 
7021
      op1 = force_operand (XEXP (value, 0), subtarget);
7022
      op2 = force_operand (op2, NULL_RTX);
7023
      switch (code)
7024
        {
7025
        case MULT:
7026
          return expand_mult (GET_MODE (value), op1, op2, target, 1);
7027
        case DIV:
7028
          if (!INTEGRAL_MODE_P (GET_MODE (value)))
7029
            return expand_simple_binop (GET_MODE (value), code, op1, op2,
7030
                                        target, 1, OPTAB_LIB_WIDEN);
7031
          else
7032
            return expand_divmod (0,
7033
                                  FLOAT_MODE_P (GET_MODE (value))
7034
                                  ? RDIV_EXPR : TRUNC_DIV_EXPR,
7035
                                  GET_MODE (value), op1, op2, target, 0);
7036
        case MOD:
7037
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7038
                                target, 0);
7039
        case UDIV:
7040
          return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7041
                                target, 1);
7042
        case UMOD:
7043
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7044
                                target, 1);
7045
        case ASHIFTRT:
7046
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
7047
                                      target, 0, OPTAB_LIB_WIDEN);
7048
        default:
7049
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
7050
                                      target, 1, OPTAB_LIB_WIDEN);
7051
        }
7052
    }
7053
  if (UNARY_P (value))
7054
    {
7055
      if (!target)
7056
        target = gen_reg_rtx (GET_MODE (value));
7057
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
7058
      switch (code)
7059
        {
7060
        case ZERO_EXTEND:
7061
        case SIGN_EXTEND:
7062
        case TRUNCATE:
7063
        case FLOAT_EXTEND:
7064
        case FLOAT_TRUNCATE:
7065
          convert_move (target, op1, code == ZERO_EXTEND);
7066
          return target;
7067
 
7068
        case FIX:
7069
        case UNSIGNED_FIX:
7070
          expand_fix (target, op1, code == UNSIGNED_FIX);
7071
          return target;
7072
 
7073
        case FLOAT:
7074
        case UNSIGNED_FLOAT:
7075
          expand_float (target, op1, code == UNSIGNED_FLOAT);
7076
          return target;
7077
 
7078
        default:
7079
          return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7080
        }
7081
    }
7082
 
7083
#ifdef INSN_SCHEDULING
7084
  /* On machines that have insn scheduling, we want all memory references to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
7086
  if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7087
    value
7088
      = simplify_gen_subreg (GET_MODE (value),
7089
                             force_reg (GET_MODE (SUBREG_REG (value)),
7090
                                        force_operand (SUBREG_REG (value),
7091
                                                       NULL_RTX)),
7092
                             GET_MODE (SUBREG_REG (value)),
7093
                             SUBREG_BYTE (value));
7094
#endif
7095
 
7096
  return value;
7097
}
7098
 
7099
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */
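/* In practice this is used when X is about to be modified before EXP has
   been evaluated: a nonzero result means X can safely be used as the
   target, otherwise the expanders fall back to a separate temporary.  */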
7106
 
7107
int
7108
safe_from_p (const_rtx x, tree exp, int top_p)
7109
{
7110
  rtx exp_rtl = 0;
7111
  int i, nops;
7112
 
7113
  if (x == 0
7114
      /* If EXP has varying size, we MUST use a target since we currently
7115
         have no way of allocating temporaries of variable size
7116
         (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7117
         So we assume here that something at a higher level has prevented a
7118
         clash.  This is somewhat bogus, but the best we can do.  Only
7119
         do this when X is BLKmode and when we are at the top level.  */
7120
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7121
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7122
          && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7123
              || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7124
              || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7125
              != INTEGER_CST)
7126
          && GET_MODE (x) == BLKmode)
7127
      /* If X is in the outgoing argument area, it is always safe.  */
7128
      || (MEM_P (x)
7129
          && (XEXP (x, 0) == virtual_outgoing_args_rtx
7130
              || (GET_CODE (XEXP (x, 0)) == PLUS
7131
                  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7132
    return 1;
7133
 
7134
  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7135
     find the underlying pseudo.  */
7136
  if (GET_CODE (x) == SUBREG)
7137
    {
7138
      x = SUBREG_REG (x);
7139
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7140
        return 0;
7141
    }
7142
 
7143
  /* Now look at our tree code and possibly recurse.  */
7144
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7145
    {
7146
    case tcc_declaration:
7147
      exp_rtl = DECL_RTL_IF_SET (exp);
7148
      break;
7149
 
7150
    case tcc_constant:
7151
      return 1;
7152
 
7153
    case tcc_exceptional:
7154
      if (TREE_CODE (exp) == TREE_LIST)
7155
        {
7156
          while (1)
7157
            {
7158
              if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7159
                return 0;
7160
              exp = TREE_CHAIN (exp);
7161
              if (!exp)
7162
                return 1;
7163
              if (TREE_CODE (exp) != TREE_LIST)
7164
                return safe_from_p (x, exp, 0);
7165
            }
7166
        }
7167
      else if (TREE_CODE (exp) == CONSTRUCTOR)
7168
        {
7169
          constructor_elt *ce;
7170
          unsigned HOST_WIDE_INT idx;
7171
 
7172
          FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce)
7173
            if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7174
                || !safe_from_p (x, ce->value, 0))
7175
              return 0;
7176
          return 1;
7177
        }
7178
      else if (TREE_CODE (exp) == ERROR_MARK)
7179
        return 1;       /* An already-visited SAVE_EXPR? */
7180
      else
7181
        return 0;
7182
 
7183
    case tcc_statement:
7184
      /* The only case we look at here is the DECL_INITIAL inside a
7185
         DECL_EXPR.  */
7186
      return (TREE_CODE (exp) != DECL_EXPR
7187
              || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7188
              || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7189
              || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7190
 
7191
    case tcc_binary:
7192
    case tcc_comparison:
7193
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7194
        return 0;
7195
      /* Fall through.  */
7196
 
7197
    case tcc_unary:
7198
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7199
 
7200
    case tcc_expression:
7201
    case tcc_reference:
7202
    case tcc_vl_exp:
7203
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
7204
         the expression.  If it is set, we conflict iff we are that rtx or
7205
         both are in memory.  Otherwise, we check all operands of the
7206
         expression recursively.  */
7207
 
7208
      switch (TREE_CODE (exp))
7209
        {
7210
        case ADDR_EXPR:
7211
          /* If the operand is static or we are static, we can't conflict.
7212
             Likewise if we don't conflict with the operand at all.  */
7213
          if (staticp (TREE_OPERAND (exp, 0))
7214
              || TREE_STATIC (exp)
7215
              || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7216
            return 1;
7217
 
7218
          /* Otherwise, the only way this can conflict is if we are taking
7219
             the address of a DECL whose address is part of X, which is
7220
             very rare.  */
7221
          exp = TREE_OPERAND (exp, 0);
7222
          if (DECL_P (exp))
7223
            {
7224
              if (!DECL_RTL_SET_P (exp)
7225
                  || !MEM_P (DECL_RTL (exp)))
7226
                return 0;
7227
              else
7228
                exp_rtl = XEXP (DECL_RTL (exp), 0);
7229
            }
7230
          break;
7231
 
7232
        case MEM_REF:
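          /* If X is also in memory and the two alias sets may conflict,
             a store through EXP could clobber X.  */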
7233
          if (MEM_P (x)
7234
              && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7235
                                        get_alias_set (exp)))
7236
            return 0;
7237
          break;
7238
 
7239
        case CALL_EXPR:
7240
          /* Assume that the call will clobber all hard registers and
7241
             all of memory.  */
7242
          if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7243
              || MEM_P (x))
7244
            return 0;
7245
          break;
7246
 
7247
        case WITH_CLEANUP_EXPR:
7248
        case CLEANUP_POINT_EXPR:
7249
          /* Lowered by gimplify.c.  */
7250
          gcc_unreachable ();
7251
 
7252
        case SAVE_EXPR:
7253
          return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7254
 
7255
        default:
7256
          break;
7257
        }
7258
 
7259
      /* If we have an rtx, we do not need to scan our operands.  */
7260
      if (exp_rtl)
7261
        break;
7262
 
7263
      nops = TREE_OPERAND_LENGTH (exp);
7264
      for (i = 0; i < nops; i++)
7265
        if (TREE_OPERAND (exp, i) != 0
7266
            && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7267
          return 0;
7268
 
7269
      break;
7270
 
7271
    case tcc_type:
7272
      /* Should never get a type here.  */
7273
      gcc_unreachable ();
7274
    }
7275
 
7276
  /* If we have an rtl, find any enclosed object.  Then see if we conflict
7277
     with it.  */
7278
  if (exp_rtl)
7279
    {
7280
      if (GET_CODE (exp_rtl) == SUBREG)
7281
        {
7282
          exp_rtl = SUBREG_REG (exp_rtl);
7283
          if (REG_P (exp_rtl)
7284
              && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7285
            return 0;
7286
        }
7287
 
7288
      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
7289
         are memory and they conflict.  */
7290
      return ! (rtx_equal_p (x, exp_rtl)
7291
                || (MEM_P (x) && MEM_P (exp_rtl)
7292
                    && true_dependence (exp_rtl, VOIDmode, x)));
7293
    }
7294
 
7295
  /* If we reach here, it is safe.  */
7296
  return 1;
7297
}
7298
 
7299
 
7300
/* Return the highest power of two that EXP is known to be a multiple of.
7301
   This is used in updating alignment of MEMs in array references.  */
7302
 
7303
unsigned HOST_WIDE_INT
7304
highest_pow2_factor (const_tree exp)
7305
{
7306
  unsigned HOST_WIDE_INT c0, c1;
7307
 
7308
  switch (TREE_CODE (exp))
7309
    {
7310
    case INTEGER_CST:
7311
      /* We can find the lowest bit that's a one.  If the low
7312
         HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
7313
         We need to handle this case since we can find it in a COND_EXPR,
7314
         a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
7315
         erroneous program, so return BIGGEST_ALIGNMENT to avoid any
7316
         later ICE.  */
7317
      if (TREE_OVERFLOW (exp))
7318
        return BIGGEST_ALIGNMENT;
7319
      else
7320
        {
7321
          /* Note: tree_low_cst is intentionally not used here,
7322
             we don't care about the upper bits.  */
7323
          c0 = TREE_INT_CST_LOW (exp);
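          /* c0 & -c0 isolates the lowest set bit: e.g. for 24 (binary
             11000) it yields 8, the largest power of two dividing 24.  */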
7324
          c0 &= -c0;
7325
          return c0 ? c0 : BIGGEST_ALIGNMENT;
7326
        }
7327
      break;
7328
 
7329
    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
7330
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7331
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7332
      return MIN (c0, c1);
7333
 
7334
    case MULT_EXPR:
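      /* The factors multiply: operands known to be multiples of 4 and 8
         give a product known to be a multiple of 32.  */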
7335
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7336
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7337
      return c0 * c1;
7338
 
7339
    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
7340
    case CEIL_DIV_EXPR:
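      /* Dividing by a power of two divides the known factor: a multiple
         of 32 divided by 4 is still a multiple of 8.  */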
7341
      if (integer_pow2p (TREE_OPERAND (exp, 1))
7342
          && host_integerp (TREE_OPERAND (exp, 1), 1))
7343
        {
7344
          c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7345
          c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
7346
          return MAX (1, c0 / c1);
7347
        }
7348
      break;
7349
 
7350
    case BIT_AND_EXPR:
7351
      /* The highest power of two of a bit-and expression is the maximum of
7352
         that of its operands.  We typically get here for a complex LHS and
7353
         a constant negative power of two on the RHS to force an explicit
7354
         alignment, so don't bother looking at the LHS.  */
7355
      return highest_pow2_factor (TREE_OPERAND (exp, 1));
7356
 
7357
    CASE_CONVERT:
7358
    case SAVE_EXPR:
7359
      return highest_pow2_factor (TREE_OPERAND (exp, 0));
7360
 
7361
    case COMPOUND_EXPR:
7362
      return highest_pow2_factor (TREE_OPERAND (exp, 1));
7363
 
7364
    case COND_EXPR:
7365
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7366
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
7367
      return MIN (c0, c1);
7368
 
7369
    default:
7370
      break;
7371
    }
7372
 
7373
  return 1;
7374
}
7375
 
7376
/* Similar, except that the alignment requirements of TARGET are
7377
   taken into account.  Assume it is at least as aligned as its
7378
   type, unless it is a COMPONENT_REF in which case the layout of
7379
   the structure gives the alignment.  */
7380
 
7381
static unsigned HOST_WIDE_INT
7382
highest_pow2_factor_for_target (const_tree target, const_tree exp)
7383
{
7384
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7385
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7386
 
7387
  return MAX (factor, talign);
7388
}
7389
 
7390
/* Subroutine of expand_expr.  Expand the two operands of a binary
7391
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
7392
   The value may be stored in TARGET if TARGET is nonzero.  The
7393
   MODIFIER argument is as documented by expand_expr.  */
7394
 
7395
static void
7396
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7397
                 enum expand_modifier modifier)
7398
{
7399
  if (! safe_from_p (target, exp1, 1))
7400
    target = 0;
7401
  if (operand_equal_p (exp0, exp1, 0))
7402
    {
7403
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7404
      *op1 = copy_rtx (*op0);
7405
    }
7406
  else
7407
    {
7408
      /* If we need to preserve evaluation order, copy exp0 into its own
7409
         temporary variable so that it can't be clobbered by exp1.  */
7410
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7411
        exp0 = save_expr (exp0);
7412
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7413
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7414
    }
7415
}
7416
 
7417
 
7418
/* Return a MEM that contains constant EXP.  DEFER is as for
7419
   output_constant_def and MODIFIER is as for expand_expr.  */
7420
 
7421
static rtx
7422
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7423
{
7424
  rtx mem;
7425
 
7426
  mem = output_constant_def (exp, defer);
7427
  if (modifier != EXPAND_INITIALIZER)
7428
    mem = use_anchored_address (mem);
7429
  return mem;
7430
}
7431
 
7432
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
7433
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7434
 
7435
static rtx
7436
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7437
                         enum expand_modifier modifier, addr_space_t as)
7438
{
7439
  rtx result, subtarget;
7440
  tree inner, offset;
7441
  HOST_WIDE_INT bitsize, bitpos;
7442
  int volatilep, unsignedp;
7443
  enum machine_mode mode1;
7444
 
7445
  /* If we are taking the address of a constant and are at the top level,
7446
     we have to use output_constant_def since we can't call force_const_mem
7447
     at top level.  */
7448
  /* ??? This should be considered a front-end bug.  We should not be
7449
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
7450
     exception here is STRING_CST.  */
7451
  if (CONSTANT_CLASS_P (exp))
7452
    {
7453
      result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7454
      if (modifier < EXPAND_SUM)
7455
        result = force_operand (result, target);
7456
      return result;
7457
    }
7458
 
7459
  /* Everything must be something allowed by is_gimple_addressable.  */
7460
  switch (TREE_CODE (exp))
7461
    {
7462
    case INDIRECT_REF:
7463
      /* This case will happen via recursion for &a->b.  */
7464
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7465
 
7466
    case MEM_REF:
7467
      {
7468
        tree tem = TREE_OPERAND (exp, 0);
7469
        if (!integer_zerop (TREE_OPERAND (exp, 1)))
7470
          tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7471
        return expand_expr (tem, target, tmode, modifier);
7472
      }
7473
 
7474
    case CONST_DECL:
7475
      /* Expand the initializer like constants above.  */
7476
      result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7477
                                           0, modifier), 0);
7478
      if (modifier < EXPAND_SUM)
7479
        result = force_operand (result, target);
7480
      return result;
7481
 
7482
    case REALPART_EXPR:
7483
      /* The real part of the complex number is always first, therefore
7484
         the address is the same as the address of the parent object.  */
7485
      offset = 0;
7486
      bitpos = 0;
7487
      inner = TREE_OPERAND (exp, 0);
7488
      break;
7489
 
7490
    case IMAGPART_EXPR:
7491
      /* The imaginary part of the complex number is always second.
7492
         The expression is therefore always offset by the size of the
7493
         scalar type.  */
7494
      offset = 0;
7495
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7496
      inner = TREE_OPERAND (exp, 0);
7497
      break;
7498
 
7499
    default:
7500
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
7501
         expand_expr, as that can have various side effects; LABEL_DECLs for
7502
         example, may not have their DECL_RTL set yet.  Expand the rtl of
7503
         CONSTRUCTORs too, which should yield a memory reference for the
7504
         constructor's contents.  Assume language specific tree nodes can
7505
         be expanded in some interesting way.  */
7506
      gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7507
      if (DECL_P (exp)
7508
          || TREE_CODE (exp) == CONSTRUCTOR
7509
          || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7510
        {
7511
          result = expand_expr (exp, target, tmode,
7512
                                modifier == EXPAND_INITIALIZER
7513
                                ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7514
 
7515
          /* If the DECL isn't in memory, then the DECL wasn't properly
7516
             marked TREE_ADDRESSABLE, which will be either a front-end
7517
             or a tree optimizer bug.  */
7518
 
7519
          if (TREE_ADDRESSABLE (exp)
7520
              && ! MEM_P (result)
7521
              && ! targetm.calls.allocate_stack_slots_for_args())
7522
            {
7523
              error ("local frame unavailable (naked function?)");
7524
              return result;
7525
            }
7526
          else
7527
            gcc_assert (MEM_P (result));
7528
          result = XEXP (result, 0);
7529
 
7530
          /* ??? Is this needed anymore?  */
7531
          if (DECL_P (exp) && !TREE_USED (exp) == 0)
7532
            {
7533
              assemble_external (exp);
7534
              TREE_USED (exp) = 1;
7535
            }
7536
 
7537
          if (modifier != EXPAND_INITIALIZER
7538
              && modifier != EXPAND_CONST_ADDRESS
7539
              && modifier != EXPAND_SUM)
7540
            result = force_operand (result, target);
7541
          return result;
7542
        }
7543
 
7544
      /* Pass FALSE as the last argument to get_inner_reference although
7545
         we are expanding to RTL.  The rationale is that we know how to
7546
         handle "aligning nodes" here: we can just bypass them because
7547
         they won't change the final object whose address will be returned
7548
         (they actually exist only for that purpose).  */
7549
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7550
                                   &mode1, &unsignedp, &volatilep, false);
7551
      break;
7552
    }
7553
 
7554
  /* We must have made progress.  */
7555
  gcc_assert (inner != exp);
7556
 
7557
  subtarget = offset || bitpos ? NULL_RTX : target;
7558
  /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7559
     inner alignment, force the inner to be sufficiently aligned.  */
7560
  if (CONSTANT_CLASS_P (inner)
7561
      && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7562
    {
7563
      inner = copy_node (inner);
7564
      TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7565
      TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7566
      TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7567
    }
7568
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7569
 
7570
  if (offset)
7571
    {
7572
      rtx tmp;
7573
 
7574
      if (modifier != EXPAND_NORMAL)
7575
        result = force_operand (result, NULL);
7576
      tmp = expand_expr (offset, NULL_RTX, tmode,
7577
                         modifier == EXPAND_INITIALIZER
7578
                          ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7579
 
7580
      result = convert_memory_address_addr_space (tmode, result, as);
7581
      tmp = convert_memory_address_addr_space (tmode, tmp, as);
7582
 
7583
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7584
        result = simplify_gen_binary (PLUS, tmode, result, tmp);
7585
      else
7586
        {
7587
          subtarget = bitpos ? NULL_RTX : target;
7588
          result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7589
                                        1, OPTAB_LIB_WIDEN);
7590
        }
7591
    }
7592
 
7593
  if (bitpos)
7594
    {
7595
      /* Someone beforehand should have rejected taking the address
7596
         of such an object.  */
7597
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7598
 
7599
      result = plus_constant (result, bitpos / BITS_PER_UNIT);
7600
      if (modifier < EXPAND_SUM)
7601
        result = force_operand (result, target);
7602
    }
7603
 
7604
  return result;
7605
}
7606
 
7607
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
7608
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7609
 
7610
static rtx
7611
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7612
                       enum expand_modifier modifier)
7613
{
7614
  addr_space_t as = ADDR_SPACE_GENERIC;
7615
  enum machine_mode address_mode = Pmode;
7616
  enum machine_mode pointer_mode = ptr_mode;
7617
  enum machine_mode rmode;
7618
  rtx result;
7619
 
7620
  /* Target mode of VOIDmode says "whatever's natural".  */
7621
  if (tmode == VOIDmode)
7622
    tmode = TYPE_MODE (TREE_TYPE (exp));
7623
 
7624
  if (POINTER_TYPE_P (TREE_TYPE (exp)))
7625
    {
7626
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7627
      address_mode = targetm.addr_space.address_mode (as);
7628
      pointer_mode = targetm.addr_space.pointer_mode (as);
7629
    }
7630
 
7631
  /* We can get called with some Weird Things if the user does silliness
7632
     like "(short) &a".  In that case, convert_memory_address won't do
7633
     the right thing, so ignore the given target mode.  */
7634
  if (tmode != address_mode && tmode != pointer_mode)
7635
    tmode = address_mode;
7636
 
7637
  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7638
                                    tmode, modifier, as);
7639
 
7640
  /* Despite expand_expr's claims concerning ignoring TMODE when not
7641
     strictly convenient, stuff breaks if we don't honor it.  Note
7642
     that combined with the above, we only do this for pointer modes.  */
7643
  rmode = GET_MODE (result);
7644
  if (rmode == VOIDmode)
7645
    rmode = tmode;
7646
  if (rmode != tmode)
7647
    result = convert_memory_address_addr_space (tmode, result, as);
7648
 
7649
  return result;
7650
}
7651
 
7652
/* Generate code for computing CONSTRUCTOR EXP.
7653
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
7654
   is TRUE, instead of creating a temporary variable in memory,
7655
   NULL is returned and the caller needs to handle it differently.  */
7656
 
7657
static rtx
7658
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7659
                    bool avoid_temp_mem)
7660
{
7661
  tree type = TREE_TYPE (exp);
7662
  enum machine_mode mode = TYPE_MODE (type);
7663
 
7664
  /* Try to avoid creating a temporary at all.  This is possible
7665
     if all of the initializer is zero.
7666
     FIXME: try to handle all [0..255] initializers we can handle
7667
     with memset.  */
7668
  if (TREE_STATIC (exp)
7669
      && !TREE_ADDRESSABLE (exp)
7670
      && target != 0 && mode == BLKmode
7671
      && all_zeros_p (exp))
7672
    {
7673
      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7674
      return target;
7675
    }
7676
 
7677
  /* All elts simple constants => refer to a constant in memory.  But
7678
     if this is a non-BLKmode mode, let it store a field at a time
7679
     since that should make a CONST_INT or CONST_DOUBLE when we
7680
     fold.  Likewise, if we have a target we can use, it is best to
7681
     store directly into the target unless the type is large enough
7682
     that memcpy will be used.  If we are making an initializer and
7683
     all operands are constant, put it in memory as well.
7684
 
7685
     FIXME: Avoid trying to fill vector constructors piecemeal.
7686
     Output them with output_constant_def below unless we're sure
7687
     they're zeros.  This should go away when vector initializers
7688
     are treated like VECTOR_CST instead of arrays.  */
7689
  if ((TREE_STATIC (exp)
7690
       && ((mode == BLKmode
7691
            && ! (target != 0 && safe_from_p (target, exp, 1)))
7692
                  || TREE_ADDRESSABLE (exp)
7693
                  || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7694
                      && (! MOVE_BY_PIECES_P
7695
                                     (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7696
                                      TYPE_ALIGN (type)))
7697
                      && ! mostly_zeros_p (exp))))
7698
      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7699
          && TREE_CONSTANT (exp)))
7700
    {
7701
      rtx constructor;
7702
 
7703
      if (avoid_temp_mem)
7704
        return NULL_RTX;
7705
 
7706
      constructor = expand_expr_constant (exp, 1, modifier);
7707
 
7708
      if (modifier != EXPAND_CONST_ADDRESS
7709
          && modifier != EXPAND_INITIALIZER
7710
          && modifier != EXPAND_SUM)
7711
        constructor = validize_mem (constructor);
7712
 
7713
      return constructor;
7714
    }
7715
 
7716
  /* Handle calls that pass values in multiple non-contiguous
7717
     locations.  The Irix 6 ABI has examples of this.  */
7718
  if (target == 0 || ! safe_from_p (target, exp, 1)
7719
      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7720
    {
7721
      if (avoid_temp_mem)
7722
        return NULL_RTX;
7723
 
7724
      target
7725
        = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7726
                                                    | (TREE_READONLY (exp)
7727
                                                       * TYPE_QUAL_CONST))),
7728
                       0, TREE_ADDRESSABLE (exp), 1);
7729
    }
7730
 
7731
  store_constructor (exp, target, 0, int_expr_size (exp));
7732
  return target;
7733
}
7734
 
7735
 
7736
/* expand_expr: generate code for computing expression EXP.
7737
   An rtx for the computed value is returned.  The value is never null.
7738
   In the case of a void EXP, const0_rtx is returned.
7739
 
7740
   The value may be stored in TARGET if TARGET is nonzero.
7741
   TARGET is just a suggestion; callers must assume that
7742
   the rtx returned may not be the same as TARGET.
7743
 
7744
   If TARGET is CONST0_RTX, it means that the value will be ignored.
7745
 
7746
   If TMODE is not VOIDmode, it suggests generating the
7747
   result in mode TMODE.  But this is done only when convenient.
7748
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
7749
   TMODE is just a suggestion; callers must assume that
7750
   the rtx returned may not have mode TMODE.
7751
 
7752
   Note that TARGET may have neither TMODE nor MODE.  In that case, it
7753
   probably will not be used.
7754
 
7755
   If MODIFIER is EXPAND_SUM then when EXP is an addition
7756
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7757
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
7758
   products as above, or REG or MEM, or constant.
7759
   Ordinarily in such cases we would output mul or add instructions
7760
   and then return a pseudo reg containing the sum.
7761
 
7762
   EXPAND_INITIALIZER is much like EXPAND_SUM except that
7763
   it also marks a label as absolutely required (it can't be dead).
7764
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7765
   This is used for outputting expressions used in initializers.
7766
 
7767
   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7768
   with a constant address even if that address is not normally legitimate.
7769
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7770
 
7771
   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7772
   a call parameter.  Such targets require special care as we haven't yet
7773
   marked TARGET so that it's safe from being trashed by libcalls.  We
7774
   don't want to use TARGET for anything but the final result;
7775
   intermediate values must go elsewhere.  Additionally, calls to
7776
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7777
 
7778
   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7779
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7780
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
7781
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7782
   recursively.  */
7783
 
7784
rtx
7785
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7786
                  enum expand_modifier modifier, rtx *alt_rtl)
7787
{
7788
  rtx ret;
7789
 
7790
  /* Handle ERROR_MARK before anybody tries to access its type.  */
7791
  if (TREE_CODE (exp) == ERROR_MARK
7792
      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7793
    {
7794
      ret = CONST0_RTX (tmode);
7795
      return ret ? ret : const0_rtx;
7796
    }
7797
 
7798
  /* If this is an expression of some kind and it has an associated line
7799
     number, then emit the line number before expanding the expression.
7800
 
7801
     We need to save and restore the file and line information so that
7802
     errors discovered during expansion are emitted with the right
7803
     information.  It would be better if the diagnostic routines
7804
     used the file/line information embedded in the tree nodes rather
7805
     than globals.  */
7806
  if (cfun && EXPR_HAS_LOCATION (exp))
7807
    {
7808
      location_t saved_location = input_location;
7809
      location_t saved_curr_loc = get_curr_insn_source_location ();
7810
      tree saved_block = get_curr_insn_block ();
7811
      input_location = EXPR_LOCATION (exp);
7812
      set_curr_insn_source_location (input_location);
7813
 
7814
      /* Record where the insns produced belong.  */
7815
      set_curr_insn_block (TREE_BLOCK (exp));
7816
 
7817
      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7818
 
7819
      input_location = saved_location;
7820
      set_curr_insn_block (saved_block);
7821
      set_curr_insn_source_location (saved_curr_loc);
7822
    }
7823
  else
7824
    {
7825
      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7826
    }
7827
 
7828
  return ret;
7829
}
7830
 
7831
rtx
7832
expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7833
                    enum expand_modifier modifier)
7834
{
7835
  rtx op0, op1, op2, temp;
7836
  tree type;
7837
  int unsignedp;
7838
  enum machine_mode mode;
7839
  enum tree_code code = ops->code;
7840
  optab this_optab;
7841
  rtx subtarget, original_target;
7842
  int ignore;
7843
  bool reduce_bit_field;
7844
  location_t loc = ops->location;
7845
  tree treeop0, treeop1, treeop2;
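/* Reduce the result of EXPR to the precision of a bit-field type when
   that is required; see the setting of reduce_bit_field below.  */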
7846
#define REDUCE_BIT_FIELD(expr)  (reduce_bit_field                         \
7847
                                 ? reduce_to_bit_field_precision ((expr), \
7848
                                                                  target, \
7849
                                                                  type)   \
7850
                                 : (expr))
7851
 
7852
  type = ops->type;
7853
  mode = TYPE_MODE (type);
7854
  unsignedp = TYPE_UNSIGNED (type);
7855
 
7856
  treeop0 = ops->op0;
7857
  treeop1 = ops->op1;
7858
  treeop2 = ops->op2;
7859
 
7860
  /* We should be called only on simple (binary or unary) expressions,
7861
     exactly those that are valid in gimple expressions that aren't
7862
     GIMPLE_SINGLE_RHS (or invalid).  */
7863
  gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7864
              || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7865
              || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7866
 
7867
  ignore = (target == const0_rtx
7868
            || ((CONVERT_EXPR_CODE_P (code)
7869
                 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7870
                && TREE_CODE (type) == VOID_TYPE));
7871
 
7872
  /* We should be called only if we need the result.  */
7873
  gcc_assert (!ignore);
7874
 
7875
  /* An operation in what may be a bit-field type needs the
7876
     result to be reduced to the precision of the bit-field type,
7877
     which is narrower than that of the type's mode.  */
7878
  reduce_bit_field = (INTEGRAL_TYPE_P (type)
7879
                      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7880
 
7881
  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7882
    target = 0;
7883
 
7884
  /* Use subtarget as the target for operand 0 of a binary operation.  */
7885
  subtarget = get_subtarget (target);
7886
  original_target = target;
7887
 
7888
  switch (code)
7889
    {
7890
    case NON_LVALUE_EXPR:
7891
    case PAREN_EXPR:
7892
    CASE_CONVERT:
7893
      if (treeop0 == error_mark_node)
7894
        return const0_rtx;
7895
 
7896
      if (TREE_CODE (type) == UNION_TYPE)
7897
        {
7898
          tree valtype = TREE_TYPE (treeop0);
7899
 
7900
          /* If both input and output are BLKmode, this conversion isn't doing
7901
             anything except possibly changing memory attributes.  */
7902
          if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7903
            {
7904
              rtx result = expand_expr (treeop0, target, tmode,
7905
                                        modifier);
7906
 
7907
              result = copy_rtx (result);
7908
              set_mem_attributes (result, type, 0);
7909
              return result;
7910
            }
7911
 
7912
          if (target == 0)
7913
            {
7914
              if (TYPE_MODE (type) != BLKmode)
7915
                target = gen_reg_rtx (TYPE_MODE (type));
7916
              else
7917
                target = assign_temp (type, 0, 1, 1);
7918
            }
7919
 
7920
          if (MEM_P (target))
7921
            /* Store data into beginning of memory target.  */
7922
            store_expr (treeop0,
7923
                        adjust_address (target, TYPE_MODE (valtype), 0),
7924
                        modifier == EXPAND_STACK_PARM,
7925
                        false);
7926
 
7927
          else
7928
            {
7929
              gcc_assert (REG_P (target));
7930
 
7931
              /* Store this field into a union of the proper type.  */
7932
              store_field (target,
7933
                           MIN ((int_size_in_bytes (TREE_TYPE
7934
                                                    (treeop0))
7935
                                 * BITS_PER_UNIT),
7936
                                (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7937
                           0, 0, 0, TYPE_MODE (valtype), treeop0,
7938
                           type, 0, false);
7939
            }
7940
 
7941
          /* Return the entire union.  */
7942
          return target;
7943
        }
7944
 
7945
      if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7946
        {
7947
          op0 = expand_expr (treeop0, target, VOIDmode,
7948
                             modifier);
7949
 
7950
          /* If the signedness of the conversion differs and OP0 is
7951
             a promoted SUBREG, clear that indication since we now
7952
             have to do the proper extension.  */
7953
          if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7954
              && GET_CODE (op0) == SUBREG)
7955
            SUBREG_PROMOTED_VAR_P (op0) = 0;
7956
 
7957
          return REDUCE_BIT_FIELD (op0);
7958
        }
7959
 
7960
      op0 = expand_expr (treeop0, NULL_RTX, mode,
7961
                         modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7962
      if (GET_MODE (op0) == mode)
7963
        ;
7964
 
7965
      /* If OP0 is a constant, just convert it into the proper mode.  */
7966
      else if (CONSTANT_P (op0))
7967
        {
7968
          tree inner_type = TREE_TYPE (treeop0);
7969
          enum machine_mode inner_mode = GET_MODE (op0);
7970
 
7971
          if (inner_mode == VOIDmode)
7972
            inner_mode = TYPE_MODE (inner_type);
7973
 
7974
          if (modifier == EXPAND_INITIALIZER)
7975
            op0 = simplify_gen_subreg (mode, op0, inner_mode,
7976
                                       subreg_lowpart_offset (mode,
7977
                                                              inner_mode));
7978
          else
7979
            op0 = convert_modes (mode, inner_mode, op0,
7980
                                 TYPE_UNSIGNED (inner_type));
7981
        }
7982
 
7983
      else if (modifier == EXPAND_INITIALIZER)
7984
        op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7985
 
7986
      else if (target == 0)
7987
        op0 = convert_to_mode (mode, op0,
7988
                               TYPE_UNSIGNED (TREE_TYPE
7989
                                              (treeop0)));
7990
      else
7991
        {
7992
          convert_move (target, op0,
7993
                        TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7994
          op0 = target;
7995
        }
7996
 
7997
      return REDUCE_BIT_FIELD (op0);
7998
 
7999
    case ADDR_SPACE_CONVERT_EXPR:
8000
      {
8001
        tree treeop0_type = TREE_TYPE (treeop0);
8002
        addr_space_t as_to;
8003
        addr_space_t as_from;
8004
 
8005
        gcc_assert (POINTER_TYPE_P (type));
8006
        gcc_assert (POINTER_TYPE_P (treeop0_type));
8007
 
8008
        as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8009
        as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8010
 
8011
        /* Conversions between pointers to the same address space should
8012
           have been implemented via CONVERT_EXPR / NOP_EXPR.  */
8013
        gcc_assert (as_to != as_from);
8014
 
8015
        /* Ask target code to handle conversion between pointers
8016
           to overlapping address spaces.  */
8017
        if (targetm.addr_space.subset_p (as_to, as_from)
8018
            || targetm.addr_space.subset_p (as_from, as_to))
8019
          {
8020
            op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8021
            op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8022
            gcc_assert (op0);
8023
            return op0;
8024
          }
8025
 
8026
        /* For disjoint address spaces, converting anything but
8027
           a null pointer invokes undefined behaviour.  We simply
8028
           always return a null pointer here.  */
8029
        return CONST0_RTX (mode);
8030
      }
8031
 
8032
    case POINTER_PLUS_EXPR:
8033
      /* Even though the sizetype mode and the pointer's mode can be different,
8034
         expand is able to handle this correctly and get the correct result out
8035
         of the PLUS_EXPR code.  */
8036
      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8037
         if sizetype precision is smaller than pointer precision.  */
8038
      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8039
        treeop1 = fold_convert_loc (loc, type,
8040
                                    fold_convert_loc (loc, ssizetype,
8041
                                                      treeop1));
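      /* Fall through.  */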
8042
    case PLUS_EXPR:
8043
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8044
         something else, make sure we add the register to the constant and
8045
         then to the other thing.  This case can occur during strength
8046
         reduction and doing it this way will produce better code if the
8047
         frame pointer or argument pointer is eliminated.
8048
 
8049
         fold-const.c will ensure that the constant is always in the inner
8050
         PLUS_EXPR, so the only case we need to do anything about is if
8051
         sp, ap, or fp is our second argument, in which case we must swap
8052
         the innermost first argument and our second argument.  */
8053
 
8054
      if (TREE_CODE (treeop0) == PLUS_EXPR
8055
          && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8056
          && TREE_CODE (treeop1) == VAR_DECL
8057
          && (DECL_RTL (treeop1) == frame_pointer_rtx
8058
              || DECL_RTL (treeop1) == stack_pointer_rtx
8059
              || DECL_RTL (treeop1) == arg_pointer_rtx))
8060
        {
8061
          tree t = treeop1;
8062
 
8063
          treeop1 = TREE_OPERAND (treeop0, 0);
8064
          TREE_OPERAND (treeop0, 0) = t;
8065
        }
8066
 
8067
      /* If the result is to be ptr_mode and we are adding an integer to
8068
         something, we might be forming a constant.  So try to use
8069
         plus_constant.  If it produces a sum and we can't accept it,
8070
         use force_operand.  This allows P = &ARR[const] to generate
8071
         efficient code on machines where a SYMBOL_REF is not a valid
8072
         address.
8073
 
8074
         If this is an EXPAND_SUM call, always return the sum.  */
8075
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8076
          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8077
        {
8078
          if (modifier == EXPAND_STACK_PARM)
8079
            target = 0;
8080
          if (TREE_CODE (treeop0) == INTEGER_CST
8081
              && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8082
              && TREE_CONSTANT (treeop1))
8083
            {
8084
              rtx constant_part;
8085
 
8086
              op1 = expand_expr (treeop1, subtarget, VOIDmode,
8087
                                 EXPAND_SUM);
8088
              /* Use immed_double_const to ensure that the constant is
8089
                 truncated according to the mode of OP1, then sign extended
8090
                 to a HOST_WIDE_INT.  Using the constant directly can result
8091
                 in non-canonical RTL in a 64x32 cross compile.  */
8092
              constant_part
8093
                = immed_double_const (TREE_INT_CST_LOW (treeop0),
8094
                                      (HOST_WIDE_INT) 0,
8095
                                      TYPE_MODE (TREE_TYPE (treeop1)));
8096
              op1 = plus_constant (op1, INTVAL (constant_part));
8097
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8098
                op1 = force_operand (op1, target);
8099
              return REDUCE_BIT_FIELD (op1);
8100
            }
8101
 
8102
          else if (TREE_CODE (treeop1) == INTEGER_CST
8103
                   && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8104
                   && TREE_CONSTANT (treeop0))
8105
            {
8106
              rtx constant_part;
8107
 
8108
              op0 = expand_expr (treeop0, subtarget, VOIDmode,
8109
                                 (modifier == EXPAND_INITIALIZER
8110
                                 ? EXPAND_INITIALIZER : EXPAND_SUM));
8111
              if (! CONSTANT_P (op0))
8112
                {
8113
                  op1 = expand_expr (treeop1, NULL_RTX,
8114
                                     VOIDmode, modifier);
8115
                  /* Return a PLUS if modifier says it's OK.  */
8116
                  if (modifier == EXPAND_SUM
8117
                      || modifier == EXPAND_INITIALIZER)
8118
                    return simplify_gen_binary (PLUS, mode, op0, op1);
8119
                  goto binop2;
8120
                }
8121
              /* Use immed_double_const to ensure that the constant is
8122
                 truncated according to the mode of OP1, then sign extended
8123
                 to a HOST_WIDE_INT.  Using the constant directly can result
8124
                 in non-canonical RTL in a 64x32 cross compile.  */
8125
              constant_part
8126
                = immed_double_const (TREE_INT_CST_LOW (treeop1),
8127
                                      (HOST_WIDE_INT) 0,
8128
                                      TYPE_MODE (TREE_TYPE (treeop0)));
8129
              op0 = plus_constant (op0, INTVAL (constant_part));
8130
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8131
                op0 = force_operand (op0, target);
8132
              return REDUCE_BIT_FIELD (op0);
8133
            }
8134
        }
8135
 
8136
      /* Use TER to expand pointer addition of a negated value
8137
         as pointer subtraction.  */
8138
      if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8139
           || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8140
               && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8141
          && TREE_CODE (treeop1) == SSA_NAME
8142
          && TYPE_MODE (TREE_TYPE (treeop0))
8143
             == TYPE_MODE (TREE_TYPE (treeop1)))
8144
        {
8145
          gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8146
          if (def)
8147
            {
8148
              treeop1 = gimple_assign_rhs1 (def);
8149
              code = MINUS_EXPR;
8150
              goto do_minus;
8151
            }
8152
        }
8153
 
8154
      /* No sense saving up arithmetic to be done
8155
         if it's all in the wrong mode to form part of an address.
8156
         And force_operand won't know whether to sign-extend or
8157
         zero-extend.  */
8158
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8159
          || mode != ptr_mode)
8160
        {
8161
          expand_operands (treeop0, treeop1,
8162
                           subtarget, &op0, &op1, EXPAND_NORMAL);
8163
          if (op0 == const0_rtx)
8164
            return op1;
8165
          if (op1 == const0_rtx)
8166
            return op0;
8167
          goto binop2;
8168
        }
8169
 
8170
      expand_operands (treeop0, treeop1,
8171
                       subtarget, &op0, &op1, modifier);
8172
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8173
 
8174
    case MINUS_EXPR:
8175
    do_minus:
8176
      /* For initializers, we are allowed to return a MINUS of two
8177
         symbolic constants.  Here we handle all cases when both operands
8178
         are constant.  */
8179
      /* Handle difference of two symbolic constants,
8180
         for the sake of an initializer.  */
8181
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8182
          && really_constant_p (treeop0)
8183
          && really_constant_p (treeop1))
8184
        {
8185
          expand_operands (treeop0, treeop1,
8186
                           NULL_RTX, &op0, &op1, modifier);
8187
 
8188
          /* If the last operand is a CONST_INT, use plus_constant of
8189
             the negated constant.  Else make the MINUS.  */
8190
          if (CONST_INT_P (op1))
8191
            return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8192
          else
8193
            return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8194
        }
8195
 
8196
      /* No sense saving up arithmetic to be done
8197
         if it's all in the wrong mode to form part of an address.
8198
         And force_operand won't know whether to sign-extend or
8199
         zero-extend.  */
8200
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8201
          || mode != ptr_mode)
8202
        goto binop;
8203
 
8204
      expand_operands (treeop0, treeop1,
8205
                       subtarget, &op0, &op1, modifier);
8206
 
8207
      /* Convert A - const to A + (-const).  */
8208
      if (CONST_INT_P (op1))
8209
        {
8210
          op1 = negate_rtx (mode, op1);
8211
          return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8212
        }
8213
 
8214
      goto binop2;
8215
 
8216
    case WIDEN_MULT_PLUS_EXPR:
8217
    case WIDEN_MULT_MINUS_EXPR:
8218
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8219
      op2 = expand_normal (treeop2);
8220
      target = expand_widen_pattern_expr (ops, op0, op1, op2,
8221
                                          target, unsignedp);
8222
      return target;
8223
 
8224
    case WIDEN_MULT_EXPR:
8225
      /* If first operand is constant, swap them.
8226
         Thus the following special case checks need only
8227
         check the second operand.  */
8228
      if (TREE_CODE (treeop0) == INTEGER_CST)
8229
        {
8230
          tree t1 = treeop0;
8231
          treeop0 = treeop1;
8232
          treeop1 = t1;
8233
        }
8234
 
8235
      /* First, check if we have a multiplication of one signed and one
8236
         unsigned operand.  */
8237
      if (TREE_CODE (treeop1) != INTEGER_CST
8238
          && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8239
              != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8240
        {
8241
          enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8242
          this_optab = usmul_widen_optab;
8243
          if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8244
                != CODE_FOR_nothing)
8245
            {
8246
              if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8247
                expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8248
                                 EXPAND_NORMAL);
8249
              else
8250
                expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8251
                                 EXPAND_NORMAL);
8252
              goto binop3;
8253
            }
8254
        }
8255
      /* Check for a multiplication with matching signedness.  */
8256
      else if ((TREE_CODE (treeop1) == INTEGER_CST
8257
                && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8258
               || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8259
                   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8260
        {
8261
          tree op0type = TREE_TYPE (treeop0);
8262
          enum machine_mode innermode = TYPE_MODE (op0type);
8263
          bool zextend_p = TYPE_UNSIGNED (op0type);
8264
          optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8265
          this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8266
 
8267
          if (TREE_CODE (treeop0) != INTEGER_CST)
8268
            {
8269
              if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8270
                    != CODE_FOR_nothing)
8271
                {
8272
                  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8273
                                   EXPAND_NORMAL);
8274
                  temp = expand_widening_mult (mode, op0, op1, target,
8275
                                               unsignedp, this_optab);
8276
                  return REDUCE_BIT_FIELD (temp);
8277
                }
8278
              if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8279
                    != CODE_FOR_nothing
8280
                  && innermode == word_mode)
8281
                {
8282
                  rtx htem, hipart;
8283
                  op0 = expand_normal (treeop0);
8284
                  if (TREE_CODE (treeop1) == INTEGER_CST)
8285
                    op1 = convert_modes (innermode, mode,
8286
                                         expand_normal (treeop1), unsignedp);
8287
                  else
8288
                    op1 = expand_normal (treeop1);
8289
                  temp = expand_binop (mode, other_optab, op0, op1, target,
8290
                                       unsignedp, OPTAB_LIB_WIDEN);
8291
                  hipart = gen_highpart (innermode, temp);
8292
                  htem = expand_mult_highpart_adjust (innermode, hipart,
8293
                                                      op0, op1, hipart,
8294
                                                      zextend_p);
8295
                  if (htem != hipart)
8296
                    emit_move_insn (hipart, htem);
8297
                  return REDUCE_BIT_FIELD (temp);
8298
                }
8299
            }
8300
        }
8301
      treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8302
      treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8303
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8304
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8305
 
8306
    case FMA_EXPR:
8307
      {
8308
        optab opt = fma_optab;
8309
        gimple def0, def2;
8310
 
8311
        /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8312
           call.  */
8313
        if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8314
          {
8315
            tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8316
            tree call_expr;
8317
 
8318
            gcc_assert (fn != NULL_TREE);
8319
            call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8320
            return expand_builtin (call_expr, target, subtarget, mode, false);
8321
          }
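        /* See whether the first multiplicand or the addend is defined by a
           negation; if so, the fused FNMA, FMS or FNMS forms can be used
           below.  */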
8322
 
8323
        def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8324
        def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8325
 
8326
        op0 = op2 = NULL;
8327
 
8328
        if (def0 && def2
8329
            && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8330
          {
8331
            opt = fnms_optab;
8332
            op0 = expand_normal (gimple_assign_rhs1 (def0));
8333
            op2 = expand_normal (gimple_assign_rhs1 (def2));
8334
          }
8335
        else if (def0
8336
                 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8337
          {
8338
            opt = fnma_optab;
8339
            op0 = expand_normal (gimple_assign_rhs1 (def0));
8340
          }
8341
        else if (def2
8342
                 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8343
          {
8344
            opt = fms_optab;
8345
            op2 = expand_normal (gimple_assign_rhs1 (def2));
8346
          }
8347
 
8348
        if (op0 == NULL)
8349
          op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8350
        if (op2 == NULL)
8351
          op2 = expand_normal (treeop2);
8352
        op1 = expand_normal (treeop1);
8353
 
8354
        return expand_ternary_op (TYPE_MODE (type), opt,
8355
                                  op0, op1, op2, target, 0);
8356
      }
8357
 
8358
    case MULT_EXPR:
8359
      /* If this is a fixed-point operation, then we cannot use the code
8360
         below because "expand_mult" doesn't support sat/no-sat fixed-point
8361
         multiplications.   */
8362
      if (ALL_FIXED_POINT_MODE_P (mode))
8363
        goto binop;
8364
 
8365
      /* If first operand is constant, swap them.
8366
         Thus the following special case checks need only
8367
         check the second operand.  */
8368
      if (TREE_CODE (treeop0) == INTEGER_CST)
8369
        {
8370
          tree t1 = treeop0;
8371
          treeop0 = treeop1;
8372
          treeop1 = t1;
8373
        }
8374
 
8375
      /* Attempt to return something suitable for generating an
8376
         indexed address, for machines that support that.  */
8377
 
8378
      if (modifier == EXPAND_SUM && mode == ptr_mode
8379
          && host_integerp (treeop1, 0))
8380
        {
8381
          tree exp1 = treeop1;
8382
 
8383
          op0 = expand_expr (treeop0, subtarget, VOIDmode,
8384
                             EXPAND_SUM);
8385
 
8386
          if (!REG_P (op0))
8387
            op0 = force_operand (op0, NULL_RTX);
8388
          if (!REG_P (op0))
8389
            op0 = copy_to_mode_reg (mode, op0);
8390
 
8391
          return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8392
                               gen_int_mode (tree_low_cst (exp1, 0),
8393
                                             TYPE_MODE (TREE_TYPE (exp1)))));
8394
        }
8395
 
8396
      if (modifier == EXPAND_STACK_PARM)
8397
        target = 0;
8398
 
8399
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8400
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8401
 
8402
    case TRUNC_DIV_EXPR:
8403
    case FLOOR_DIV_EXPR:
8404
    case CEIL_DIV_EXPR:
8405
    case ROUND_DIV_EXPR:
8406
    case EXACT_DIV_EXPR:
8407
      /* If this is a fixed-point operation, then we cannot use the code
8408
         below because "expand_divmod" doesn't support sat/no-sat fixed-point
8409
         divisions.   */
8410
      if (ALL_FIXED_POINT_MODE_P (mode))
8411
        goto binop;
8412
 
8413
      if (modifier == EXPAND_STACK_PARM)
8414
        target = 0;
8415
      /* Possible optimization: compute the dividend with EXPAND_SUM
8416
         then, if the divisor is constant, we can optimize the case
8417
         where some terms of the dividend have coefficients divisible by it.  */
8418
      expand_operands (treeop0, treeop1,
8419
                       subtarget, &op0, &op1, EXPAND_NORMAL);
8420
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8421
 
8422
    case RDIV_EXPR:
8423
      goto binop;
8424
 
8425
    case TRUNC_MOD_EXPR:
8426
    case FLOOR_MOD_EXPR:
8427
    case CEIL_MOD_EXPR:
8428
    case ROUND_MOD_EXPR:
8429
      if (modifier == EXPAND_STACK_PARM)
8430
        target = 0;
8431
      expand_operands (treeop0, treeop1,
8432
                       subtarget, &op0, &op1, EXPAND_NORMAL);
8433
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8434
 
8435
    case FIXED_CONVERT_EXPR:
8436
      op0 = expand_normal (treeop0);
8437
      if (target == 0 || modifier == EXPAND_STACK_PARM)
8438
        target = gen_reg_rtx (mode);
8439
 
8440
      if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8441
           && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8442
          || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8443
        expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8444
      else
8445
        expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8446
      return target;
8447
 
8448
    case FIX_TRUNC_EXPR:
8449
      op0 = expand_normal (treeop0);
8450
      if (target == 0 || modifier == EXPAND_STACK_PARM)
8451
        target = gen_reg_rtx (mode);
8452
      expand_fix (target, op0, unsignedp);
8453
      return target;
8454
 
8455
    case FLOAT_EXPR:
8456
      op0 = expand_normal (treeop0);
8457
      if (target == 0 || modifier == EXPAND_STACK_PARM)
8458
        target = gen_reg_rtx (mode);
8459
      /* expand_float can't figure out what to do if FROM has VOIDmode.
8460
         So give it the correct mode.  With -O, cse will optimize this.  */
8461
      if (GET_MODE (op0) == VOIDmode)
8462
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8463
                                op0);
8464
      expand_float (target, op0,
8465
                    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8466
      return target;
8467
 
8468
    case NEGATE_EXPR:
8469
      op0 = expand_expr (treeop0, subtarget,
8470
                         VOIDmode, EXPAND_NORMAL);
8471
      if (modifier == EXPAND_STACK_PARM)
8472
        target = 0;
8473
      temp = expand_unop (mode,
8474
                          optab_for_tree_code (NEGATE_EXPR, type,
8475
                                               optab_default),
8476
                          op0, target, 0);
8477
      gcc_assert (temp);
8478
      return REDUCE_BIT_FIELD (temp);
8479
 
8480
    case ABS_EXPR:
8481
      op0 = expand_expr (treeop0, subtarget,
8482
                         VOIDmode, EXPAND_NORMAL);
8483
      if (modifier == EXPAND_STACK_PARM)
8484
        target = 0;
8485
 
8486
      /* ABS_EXPR is not valid for complex arguments.  */
8487
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8488
                  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8489
 
8490
      /* Unsigned abs is simply the operand.  Testing here means we don't
8491
         risk generating incorrect code below.  */
8492
      if (TYPE_UNSIGNED (type))
8493
        return op0;
8494
 
8495
      return expand_abs (mode, op0, target, unsignedp,
8496
                         safe_from_p (target, treeop0, 1));
8497
 
8498
    case MAX_EXPR:
8499
    case MIN_EXPR:
8500
      target = original_target;
8501
      if (target == 0
8502
          || modifier == EXPAND_STACK_PARM
8503
          || (MEM_P (target) && MEM_VOLATILE_P (target))
8504
          || GET_MODE (target) != mode
8505
          || (REG_P (target)
8506
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
8507
        target = gen_reg_rtx (mode);
8508
      expand_operands (treeop0, treeop1,
8509
                       target, &op0, &op1, EXPAND_NORMAL);
8510
 
8511
      /* First try to do it with a special MIN or MAX instruction.
8512
         If that does not win, use a conditional jump to select the proper
8513
         value.  */
8514
      this_optab = optab_for_tree_code (code, type, optab_default);
8515
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8516
                           OPTAB_WIDEN);
8517
      if (temp != 0)
8518
        return temp;
8519
 
8520
      /* At this point, a MEM target is no longer useful; we will get better
8521
         code without it.  */
8522
 
8523
      if (! REG_P (target))
8524
        target = gen_reg_rtx (mode);
8525
 
8526
      /* If op1 was placed in target, swap op0 and op1.  */
8527
      if (target != op0 && target == op1)
8528
        {
8529
          temp = op0;
8530
          op0 = op1;
8531
          op1 = temp;
8532
        }
8533
 
8534
      /* We generate better code and avoid problems with op1 mentioning
8535
         target by forcing op1 into a pseudo if it isn't a constant.  */
8536
      if (! CONSTANT_P (op1))
8537
        op1 = force_reg (mode, op1);
8538
 
8539
      {
8540
        enum rtx_code comparison_code;
8541
        rtx cmpop1 = op1;
8542
 
8543
        if (code == MAX_EXPR)
8544
          comparison_code = unsignedp ? GEU : GE;
8545
        else
8546
          comparison_code = unsignedp ? LEU : LE;
8547
 
8548
        /* Canonicalize to comparisons against 0.  */
8549
        if (op1 == const1_rtx)
8550
          {
8551
            /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8552
               or (a != 0 ? a : 1) for unsigned.
8553
               For MIN we are safe converting (a <= 1 ? a : 1)
8554
               into (a <= 0 ? a : 1)  */
8555
            cmpop1 = const0_rtx;
8556
            if (code == MAX_EXPR)
8557
              comparison_code = unsignedp ? NE : GT;
8558
          }
8559
        if (op1 == constm1_rtx && !unsignedp)
8560
          {
8561
            /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8562
               and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8563
            cmpop1 = const0_rtx;
8564
            if (code == MIN_EXPR)
8565
              comparison_code = LT;
8566
          }
8567
#ifdef HAVE_conditional_move
8568
        /* Use a conditional move if possible.  */
8569
        if (can_conditionally_move_p (mode))
8570
          {
8571
            rtx insn;
8572
 
8573
            /* ??? Same problem as in expmed.c: emit_conditional_move
8574
               forces a stack adjustment via compare_from_rtx, and we
8575
               lose the stack adjustment if the sequence we are about
8576
               to create is discarded.  */
8577
            do_pending_stack_adjust ();
8578
 
8579
            start_sequence ();
8580
 
8581
            /* Try to emit the conditional move.  */
8582
            insn = emit_conditional_move (target, comparison_code,
8583
                                          op0, cmpop1, mode,
8584
                                          op0, op1, mode,
8585
                                          unsignedp);
8586
 
8587
            /* If we could do the conditional move, emit the sequence,
8588
               and return.  */
8589
            if (insn)
8590
              {
8591
                rtx seq = get_insns ();
8592
                end_sequence ();
8593
                emit_insn (seq);
8594
                return target;
8595
              }
8596
 
8597
            /* Otherwise discard the sequence and fall back to code with
8598
               branches.  */
8599
            end_sequence ();
8600
          }
8601
#endif
8602
        if (target != op0)
8603
          emit_move_insn (target, op0);
8604
 
8605
        temp = gen_label_rtx ();
8606
        do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8607
                                 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8608
                                 -1);
8609
      }
8610
      emit_move_insn (target, op1);
8611
      emit_label (temp);
8612
      return target;
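      /* A source-level sketch of the branching fallback above, assuming a
         signed MAX_EXPR (comparison_code == GE; the label name is
         illustrative):

             target = op0;
             if (target >= op1)
               goto done;
             target = op1;
           done:;

         When the target supports it, the conditional-move path instead
         produces a single cmove with no control flow.  */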
8613
 
8614
    case BIT_NOT_EXPR:
8615
      op0 = expand_expr (treeop0, subtarget,
8616
                         VOIDmode, EXPAND_NORMAL);
8617
      if (modifier == EXPAND_STACK_PARM)
8618
        target = 0;
8619
      /* In case we have to reduce the result to bitfield precision
8620
         for an unsigned bitfield, expand this as XOR with a proper constant
8621
         instead.  */
8622
      if (reduce_bit_field && TYPE_UNSIGNED (type))
8623
        temp = expand_binop (mode, xor_optab, op0,
8624
                             immed_double_int_const
8625
                               (double_int_mask (TYPE_PRECISION (type)), mode),
8626
                             target, 1, OPTAB_LIB_WIDEN);
8627
      else
8628
        temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8629
      gcc_assert (temp);
8630
      return temp;
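      /* For example, for an unsigned bit-field type of precision 3 held in
         QImode, the XOR path above computes op0 ^ 0x7, which equals the
         one's complement of op0 already truncated to 3 bits, so no separate
         REDUCE_BIT_FIELD step is needed.  */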
8631
 
8632
      /* ??? Can optimize bitwise operations with one arg constant.
8633
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8634
         and (a bitwise1 b) bitwise2 b (etc)
8635
         but that is probably not worth while.  */
8636
 
8637
    case BIT_AND_EXPR:
8638
    case BIT_IOR_EXPR:
8639
    case BIT_XOR_EXPR:
8640
      goto binop;
8641
 
8642
    case LROTATE_EXPR:
8643
    case RROTATE_EXPR:
8644
      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8645
                  || (GET_MODE_PRECISION (TYPE_MODE (type))
8646
                      == TYPE_PRECISION (type)));
8647
      /* fall through */
8648
 
8649
    case LSHIFT_EXPR:
8650
    case RSHIFT_EXPR:
8651
      /* If this is a fixed-point operation, then we cannot use the code
8652
         below because "expand_shift" doesn't support sat/no-sat fixed-point
8653
         shifts.   */
8654
      if (ALL_FIXED_POINT_MODE_P (mode))
8655
        goto binop;
8656
 
8657
      if (! safe_from_p (subtarget, treeop1, 1))
8658
        subtarget = 0;
8659
      if (modifier == EXPAND_STACK_PARM)
8660
        target = 0;
8661
      op0 = expand_expr (treeop0, subtarget,
8662
                         VOIDmode, EXPAND_NORMAL);
8663
      temp = expand_variable_shift (code, mode, op0, treeop1, target,
8664
                                    unsignedp);
8665
      if (code == LSHIFT_EXPR)
8666
        temp = REDUCE_BIT_FIELD (temp);
8667
      return temp;
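      /* Illustrative note: only a left shift can move set bits above the
         bit-field precision (e.g. a 3-bit value shifted left by 2), which is
         why LSHIFT_EXPR alone is wrapped in REDUCE_BIT_FIELD; a right shift
         of an already-reduced operand stays within the field.  */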
8668
 
8669
      /* Could determine the answer when only additive constants differ.  Also,
8670
         the addition of one can be handled by changing the condition.  */
8671
    case LT_EXPR:
8672
    case LE_EXPR:
8673
    case GT_EXPR:
8674
    case GE_EXPR:
8675
    case EQ_EXPR:
8676
    case NE_EXPR:
8677
    case UNORDERED_EXPR:
8678
    case ORDERED_EXPR:
8679
    case UNLT_EXPR:
8680
    case UNLE_EXPR:
8681
    case UNGT_EXPR:
8682
    case UNGE_EXPR:
8683
    case UNEQ_EXPR:
8684
    case LTGT_EXPR:
8685
      temp = do_store_flag (ops,
8686
                            modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8687
                            tmode != VOIDmode ? tmode : mode);
8688
      if (temp)
8689
        return temp;
8690
 
8691
      /* Use a compare and a jump for BLKmode comparisons, or for function
8692
         type comparisons if HAVE_canonicalize_funcptr_for_compare is defined.  */
8693
 
8694
      if ((target == 0
8695
           || modifier == EXPAND_STACK_PARM
8696
           || ! safe_from_p (target, treeop0, 1)
8697
           || ! safe_from_p (target, treeop1, 1)
8698
           /* Make sure we don't have a hard reg (such as function's return
8699
              value) live across basic blocks, if not optimizing.  */
8700
           || (!optimize && REG_P (target)
8701
               && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8702
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8703
 
8704
      emit_move_insn (target, const0_rtx);
8705
 
8706
      op1 = gen_label_rtx ();
8707
      jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8708
 
8709
      if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8710
        emit_move_insn (target, constm1_rtx);
8711
      else
8712
        emit_move_insn (target, const1_rtx);
8713
 
8714
      emit_label (op1);
8715
      return target;
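      /* A source-level sketch of the compare-and-jump fallback above (the
         label name is illustrative):

             target = 0;
             if (!(op0 CMP op1))
               goto over;
             target = 1;
           over:;

         (a signed one-bit result type stores -1 instead of 1).  The
         do_store_flag path is tried first since it can usually produce the
         value with a set-on-comparison pattern and no branch.  */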
8716
 
8717
    case COMPLEX_EXPR:
8718
      /* Get the rtx code of the operands.  */
8719
      op0 = expand_normal (treeop0);
8720
      op1 = expand_normal (treeop1);
8721
 
8722
      if (!target)
8723
        target = gen_reg_rtx (TYPE_MODE (type));
8724
 
8725
      /* Move the real (op0) and imaginary (op1) parts to their location.  */
8726
      write_complex_part (target, op0, false);
8727
      write_complex_part (target, op1, true);
8728
 
8729
      return target;
8730
 
8731
    case WIDEN_SUM_EXPR:
8732
      {
8733
        tree oprnd0 = treeop0;
8734
        tree oprnd1 = treeop1;
8735
 
8736
        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8737
        target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8738
                                            target, unsignedp);
8739
        return target;
8740
      }
8741
 
8742
    case REDUC_MAX_EXPR:
8743
    case REDUC_MIN_EXPR:
8744
    case REDUC_PLUS_EXPR:
8745
      {
8746
        op0 = expand_normal (treeop0);
8747
        this_optab = optab_for_tree_code (code, type, optab_default);
8748
        temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8749
        gcc_assert (temp);
8750
        return temp;
8751
      }
8752
 
8753
    case VEC_LSHIFT_EXPR:
8754
    case VEC_RSHIFT_EXPR:
8755
      {
8756
        target = expand_vec_shift_expr (ops, target);
8757
        return target;
8758
      }
8759
 
8760
    case VEC_UNPACK_HI_EXPR:
8761
    case VEC_UNPACK_LO_EXPR:
8762
      {
8763
        op0 = expand_normal (treeop0);
8764
        temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8765
                                          target, unsignedp);
8766
        gcc_assert (temp);
8767
        return temp;
8768
      }
8769
 
8770
    case VEC_UNPACK_FLOAT_HI_EXPR:
8771
    case VEC_UNPACK_FLOAT_LO_EXPR:
8772
      {
8773
        op0 = expand_normal (treeop0);
8774
        /* The signedness is determined from input operand.  */
8775
        temp = expand_widen_pattern_expr
8776
          (ops, op0, NULL_RTX, NULL_RTX,
8777
           target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8778
 
8779
        gcc_assert (temp);
8780
        return temp;
8781
      }
8782
 
8783
    case VEC_WIDEN_MULT_HI_EXPR:
8784
    case VEC_WIDEN_MULT_LO_EXPR:
8785
      {
8786
        tree oprnd0 = treeop0;
8787
        tree oprnd1 = treeop1;
8788
 
8789
        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8790
        target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8791
                                            target, unsignedp);
8792
        gcc_assert (target);
8793
        return target;
8794
      }
8795
 
8796
    case VEC_WIDEN_LSHIFT_HI_EXPR:
8797
    case VEC_WIDEN_LSHIFT_LO_EXPR:
8798
      {
8799
        tree oprnd0 = treeop0;
8800
        tree oprnd1 = treeop1;
8801
 
8802
        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8803
        target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8804
                                            target, unsignedp);
8805
        gcc_assert (target);
8806
        return target;
8807
      }
8808
 
8809
    case VEC_PACK_TRUNC_EXPR:
8810
    case VEC_PACK_SAT_EXPR:
8811
    case VEC_PACK_FIX_TRUNC_EXPR:
8812
      mode = TYPE_MODE (TREE_TYPE (treeop0));
8813
      goto binop;
8814
 
8815
    case VEC_PERM_EXPR:
8816
      expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
8817
      op2 = expand_normal (treeop2);
8818
 
8819
      /* Careful here: if the target doesn't support integral vector modes,
8820
         a constant selection vector could wind up smooshed into a normal
8821
         integral constant.  */
8822
      if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
8823
        {
8824
          tree sel_type = TREE_TYPE (treeop2);
8825
          enum machine_mode vmode
8826
            = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
8827
                               TYPE_VECTOR_SUBPARTS (sel_type));
8828
          gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
8829
          op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
8830
          gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
8831
        }
8832
      else
8833
        gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
8834
 
8835
      temp = expand_vec_perm (mode, op0, op1, op2, target);
8836
      gcc_assert (temp);
8837
      return temp;
8838
 
8839
    case DOT_PROD_EXPR:
8840
      {
8841
        tree oprnd0 = treeop0;
8842
        tree oprnd1 = treeop1;
8843
        tree oprnd2 = treeop2;
8844
        rtx op2;
8845
 
8846
        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8847
        op2 = expand_normal (oprnd2);
8848
        target = expand_widen_pattern_expr (ops, op0, op1, op2,
8849
                                            target, unsignedp);
8850
        return target;
8851
      }
8852
 
8853
    case REALIGN_LOAD_EXPR:
8854
      {
8855
        tree oprnd0 = treeop0;
8856
        tree oprnd1 = treeop1;
8857
        tree oprnd2 = treeop2;
8858
        rtx op2;
8859
 
8860
        this_optab = optab_for_tree_code (code, type, optab_default);
8861
        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8862
        op2 = expand_normal (oprnd2);
8863
        temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8864
                                  target, unsignedp);
8865
        gcc_assert (temp);
8866
        return temp;
8867
      }
8868
 
8869
    case COND_EXPR:
8870
      /* A COND_EXPR with its type being VOID_TYPE represents a
8871
         conditional jump and is handled in
8872
         expand_gimple_cond_expr.  */
8873
      gcc_assert (!VOID_TYPE_P (type));
8874
 
8875
      /* Note that COND_EXPRs whose type is a structure or union
8876
         are required to be constructed to contain assignments of
8877
         a temporary variable, so that we can evaluate them here
8878
         for side effect only.  If type is void, we must do likewise.  */
8879
 
8880
      gcc_assert (!TREE_ADDRESSABLE (type)
8881
                  && !ignore
8882
                  && TREE_TYPE (treeop1) != void_type_node
8883
                  && TREE_TYPE (treeop2) != void_type_node);
8884
 
8885
      /* If we are not to produce a result, we have no target.  Otherwise,
8886
         if a target was specified use it; it will not be used as an
8887
         intermediate target unless it is safe.  If no target, use a
8888
         temporary.  */
8889
 
8890
      if (modifier != EXPAND_STACK_PARM
8891
          && original_target
8892
          && safe_from_p (original_target, treeop0, 1)
8893
          && GET_MODE (original_target) == mode
8894
#ifdef HAVE_conditional_move
8895
          && (! can_conditionally_move_p (mode)
8896
              || REG_P (original_target))
8897
#endif
8898
          && !MEM_P (original_target))
8899
        temp = original_target;
8900
      else
8901
        temp = assign_temp (type, 0, 0, 1);
8902
 
8903
      do_pending_stack_adjust ();
8904
      NO_DEFER_POP;
8905
      op0 = gen_label_rtx ();
8906
      op1 = gen_label_rtx ();
8907
      jumpifnot (treeop0, op0, -1);
8908
      store_expr (treeop1, temp,
8909
                  modifier == EXPAND_STACK_PARM,
8910
                  false);
8911
 
8912
      emit_jump_insn (gen_jump (op1));
8913
      emit_barrier ();
8914
      emit_label (op0);
8915
      store_expr (treeop2, temp,
8916
                  modifier == EXPAND_STACK_PARM,
8917
                  false);
8918
 
8919
      emit_label (op1);
8920
      OK_DEFER_POP;
8921
      return temp;
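      /* A source-level sketch of the expansion above (label names are
         illustrative):

             if (!treeop0)
               goto else_part;
             temp = treeop1;
             goto join;
           else_part:
             temp = treeop2;
           join:;

         with the two store_expr calls supplying the assignments to TEMP.  */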
8922
 
8923
    case VEC_COND_EXPR:
8924
      target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
8925
      return target;
8926
 
8927
    default:
8928
      gcc_unreachable ();
8929
    }
8930
 
8931
  /* Here to do an ordinary binary operator.  */
8932
 binop:
8933
  expand_operands (treeop0, treeop1,
8934
                   subtarget, &op0, &op1, EXPAND_NORMAL);
8935
 binop2:
8936
  this_optab = optab_for_tree_code (code, type, optab_default);
8937
 binop3:
8938
  if (modifier == EXPAND_STACK_PARM)
8939
    target = 0;
8940
  temp = expand_binop (mode, this_optab, op0, op1, target,
8941
                       unsignedp, OPTAB_LIB_WIDEN);
8942
  gcc_assert (temp);
8943
  /* Bitwise operations do not need bitfield reduction as we expect their
8944
     operands to be properly truncated.  */
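  /* Illustrative note: if both operands already fit in the field's
     precision, so do op0 & op1, op0 | op1 and op0 ^ op1, since a bitwise
     result bit can only be set where an operand bit is set; additive and
     multiplicative results can overflow the precision and therefore still
     pass through REDUCE_BIT_FIELD below.  */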
8945
  if (code == BIT_XOR_EXPR
8946
      || code == BIT_AND_EXPR
8947
      || code == BIT_IOR_EXPR)
8948
    return temp;
8949
  return REDUCE_BIT_FIELD (temp);
8950
}
8951
#undef REDUCE_BIT_FIELD
8952
 
8953
rtx
8954
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8955
                    enum expand_modifier modifier, rtx *alt_rtl)
8956
{
8957
  rtx op0, op1, temp, decl_rtl;
8958
  tree type;
8959
  int unsignedp;
8960
  enum machine_mode mode;
8961
  enum tree_code code = TREE_CODE (exp);
8962
  rtx subtarget, original_target;
8963
  int ignore;
8964
  tree context;
8965
  bool reduce_bit_field;
8966
  location_t loc = EXPR_LOCATION (exp);
8967
  struct separate_ops ops;
8968
  tree treeop0, treeop1, treeop2;
8969
  tree ssa_name = NULL_TREE;
8970
  gimple g;
8971
 
8972
  type = TREE_TYPE (exp);
8973
  mode = TYPE_MODE (type);
8974
  unsignedp = TYPE_UNSIGNED (type);
8975
 
8976
  treeop0 = treeop1 = treeop2 = NULL_TREE;
8977
  if (!VL_EXP_CLASS_P (exp))
8978
    switch (TREE_CODE_LENGTH (code))
8979
      {
8980
        default:
8981
        case 3: treeop2 = TREE_OPERAND (exp, 2);
8982
        case 2: treeop1 = TREE_OPERAND (exp, 1);
8983
        case 1: treeop0 = TREE_OPERAND (exp, 0);
8984
        case 0: break;
8985
      }
8986
  ops.code = code;
8987
  ops.type = type;
8988
  ops.op0 = treeop0;
8989
  ops.op1 = treeop1;
8990
  ops.op2 = treeop2;
8991
  ops.location = loc;
8992
 
8993
  ignore = (target == const0_rtx
8994
            || ((CONVERT_EXPR_CODE_P (code)
8995
                 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8996
                && TREE_CODE (type) == VOID_TYPE));
8997
 
8998
  /* An operation in what may be a bit-field type needs the
8999
     result to be reduced to the precision of the bit-field type,
9000
     which is narrower than that of the type's mode.  */
9001
  reduce_bit_field = (!ignore
9002
                      && INTEGRAL_TYPE_P (type)
9003
                      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
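  /* As an example, a 3-bit unsigned bit-field type has TYPE_MODE QImode,
     so GET_MODE_PRECISION (8) exceeds TYPE_PRECISION (3) and any result
     computed in QImode must later be masked back down to 3 bits.  */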
9004
 
9005
  /* If we are going to ignore this result, we need only do something
9006
     if there is a side-effect somewhere in the expression.  If there
9007
     is, short-circuit the most common cases here.  Note that we must
9008
     not call expand_expr with anything but const0_rtx in case this
9009
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
9010
 
9011
  if (ignore)
9012
    {
9013
      if (! TREE_SIDE_EFFECTS (exp))
9014
        return const0_rtx;
9015
 
9016
      /* Ensure we reference a volatile object even if value is ignored, but
9017
         don't do this if all we are doing is taking its address.  */
9018
      if (TREE_THIS_VOLATILE (exp)
9019
          && TREE_CODE (exp) != FUNCTION_DECL
9020
          && mode != VOIDmode && mode != BLKmode
9021
          && modifier != EXPAND_CONST_ADDRESS)
9022
        {
9023
          temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9024
          if (MEM_P (temp))
9025
            copy_to_reg (temp);
9026
          return const0_rtx;
9027
        }
9028
 
9029
      if (TREE_CODE_CLASS (code) == tcc_unary
9030
          || code == COMPONENT_REF || code == INDIRECT_REF)
9031
        return expand_expr (treeop0, const0_rtx, VOIDmode,
9032
                            modifier);
9033
 
9034
      else if (TREE_CODE_CLASS (code) == tcc_binary
9035
               || TREE_CODE_CLASS (code) == tcc_comparison
9036
               || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9037
        {
9038
          expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9039
          expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9040
          return const0_rtx;
9041
        }
9042
      else if (code == BIT_FIELD_REF)
9043
        {
9044
          expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9045
          expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9046
          expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
9047
          return const0_rtx;
9048
        }
9049
 
9050
      target = 0;
9051
    }
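  /* For example, an ignored expression with no side effects, say
     (void) (a + b) on ordinary variables, falls out through the
     TREE_SIDE_EFFECTS check above as const0_rtx without emitting any RTL;
     only volatile references or embedded side effects cause code here.  */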
9052
 
9053
  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9054
    target = 0;
9055
 
9056
  /* Use subtarget as the target for operand 0 of a binary operation.  */
9057
  subtarget = get_subtarget (target);
9058
  original_target = target;
9059
 
9060
  switch (code)
9061
    {
9062
    case LABEL_DECL:
9063
      {
9064
        tree function = decl_function_context (exp);
9065
 
9066
        temp = label_rtx (exp);
9067
        temp = gen_rtx_LABEL_REF (Pmode, temp);
9068
 
9069
        if (function != current_function_decl
9070
            && function != 0)
9071
          LABEL_REF_NONLOCAL_P (temp) = 1;
9072
 
9073
        temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9074
        return temp;
9075
      }
9076
 
9077
    case SSA_NAME:
9078
      /* ??? ivopts calls expander, without any preparation from
9079
         out-of-ssa.  So fake instructions as if this was an access to the
9080
         base variable.  This unnecessarily allocates a pseudo, see how we can
9081
         reuse it, if partition base vars have it set already.  */
9082
      if (!currently_expanding_to_rtl)
9083
        return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
9084
                                   NULL);
9085
 
9086
      g = get_gimple_for_ssa_name (exp);
9087
      /* For EXPAND_INITIALIZER try harder to get something simpler.  */
9088
      if (g == NULL
9089
          && modifier == EXPAND_INITIALIZER
9090
          && !SSA_NAME_IS_DEFAULT_DEF (exp)
9091
          && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9092
          && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9093
        g = SSA_NAME_DEF_STMT (exp);
9094
      if (g)
9095
        return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode,
9096
                                 modifier, NULL);
9097
 
9098
      ssa_name = exp;
9099
      decl_rtl = get_rtx_for_ssa_name (ssa_name);
9100
      exp = SSA_NAME_VAR (ssa_name);
9101
      goto expand_decl_rtl;
9102
 
9103
    case PARM_DECL:
9104
    case VAR_DECL:
9105
      /* If a static var's type was incomplete when the decl was written,
9106
         but the type is complete now, lay out the decl now.  */
9107
      if (DECL_SIZE (exp) == 0
9108
          && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9109
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9110
        layout_decl (exp, 0);
9111
 
9112
      /* ... fall through ...  */
9113
 
9114
    case FUNCTION_DECL:
9115
    case RESULT_DECL:
9116
      decl_rtl = DECL_RTL (exp);
9117
    expand_decl_rtl:
9118
      gcc_assert (decl_rtl);
9119
      decl_rtl = copy_rtx (decl_rtl);
9120
      /* Record writes to register variables.  */
9121
      if (modifier == EXPAND_WRITE
9122
          && REG_P (decl_rtl)
9123
          && HARD_REGISTER_P (decl_rtl))
9124
        add_to_hard_reg_set (&crtl->asm_clobbers,
9125
                             GET_MODE (decl_rtl), REGNO (decl_rtl));
9126
 
9127
      /* Ensure the variable is marked as used even if it doesn't go through
9128
         a parser.  If it hasn't been used yet, write out an external
9129
         definition.  */
9130
      if (! TREE_USED (exp))
9131
        {
9132
          assemble_external (exp);
9133
          TREE_USED (exp) = 1;
9134
        }
9135
 
9136
      /* Show we haven't gotten RTL for this yet.  */
9137
      temp = 0;
9138
 
9139
      /* Variables inherited from containing functions should have
9140
         been lowered by this point.  */
9141
      context = decl_function_context (exp);
9142
      gcc_assert (!context
9143
                  || context == current_function_decl
9144
                  || TREE_STATIC (exp)
9145
                  || DECL_EXTERNAL (exp)
9146
                  /* ??? C++ creates functions that are not TREE_STATIC.  */
9147
                  || TREE_CODE (exp) == FUNCTION_DECL);
9148
 
9149
      /* This is the case of an array whose size is to be determined
9150
         from its initializer, while the initializer is still being parsed.
9151
         See expand_decl.  */
9152
 
9153
      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9154
        temp = validize_mem (decl_rtl);
9155
 
9156
      /* If DECL_RTL is memory, we are in the normal case and the
9157
         address is not valid, get the address into a register.  */
9158
 
9159
      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9160
        {
9161
          if (alt_rtl)
9162
            *alt_rtl = decl_rtl;
9163
          decl_rtl = use_anchored_address (decl_rtl);
9164
          if (modifier != EXPAND_CONST_ADDRESS
9165
              && modifier != EXPAND_SUM
9166
              && !memory_address_addr_space_p (DECL_MODE (exp),
9167
                                               XEXP (decl_rtl, 0),
9168
                                               MEM_ADDR_SPACE (decl_rtl)))
9169
            temp = replace_equiv_address (decl_rtl,
9170
                                          copy_rtx (XEXP (decl_rtl, 0)));
9171
        }
9172
 
9173
      /* If we got something, return it.  But first, set the alignment
9174
         if the address is a register.  */
9175
      if (temp != 0)
9176
        {
9177
          if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9178
            mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9179
 
9180
          return temp;
9181
        }
9182
 
9183
      /* If the mode of DECL_RTL does not match that of the decl,
9184
         there are two cases: we are dealing with a BLKmode value
9185
         that is returned in a register, or we are dealing with
9186
         a promoted value.  In the latter case, return a SUBREG
9187
         of the wanted mode, but mark it so that we know that it
9188
         was already extended.  */
9189
      if (REG_P (decl_rtl)
9190
          && DECL_MODE (exp) != BLKmode
9191
          && GET_MODE (decl_rtl) != DECL_MODE (exp))
9192
        {
9193
          enum machine_mode pmode;
9194
 
9195
          /* Get the signedness to be used for this variable.  Ensure we get
9196
             the same mode we got when the variable was declared.  */
9197
          if (code == SSA_NAME
9198
              && (g = SSA_NAME_DEF_STMT (ssa_name))
9199
              && gimple_code (g) == GIMPLE_CALL)
9200
            {
9201
              gcc_assert (!gimple_call_internal_p (g));
9202
              pmode = promote_function_mode (type, mode, &unsignedp,
9203
                                             gimple_call_fntype (g),
9204
                                             2);
9205
            }
9206
          else
9207
            pmode = promote_decl_mode (exp, &unsignedp);
9208
          gcc_assert (GET_MODE (decl_rtl) == pmode);
9209
 
9210
          temp = gen_lowpart_SUBREG (mode, decl_rtl);
9211
          SUBREG_PROMOTED_VAR_P (temp) = 1;
9212
          SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9213
          return temp;
9214
        }
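      /* As an illustration, on a target whose PROMOTE_MODE widens subword
         variables, a "short" parameter can live in an SImode register even
         though DECL_MODE is HImode; the code above then hands back
         (subreg:HI (reg:SI ...)) with SUBREG_PROMOTED_VAR_P set so later
         passes know the upper bits already hold the extended value.  */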
9215
 
9216
      return decl_rtl;
9217
 
9218
    case INTEGER_CST:
9219
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
9220
                                 TREE_INT_CST_HIGH (exp), mode);
9221
 
9222
      return temp;
9223
 
9224
    case VECTOR_CST:
9225
      {
9226
        tree tmp = NULL_TREE;
9227
        if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9228
            || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9229
            || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9230
            || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9231
            || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9232
            || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9233
          return const_vector_from_tree (exp);
9234
        if (GET_MODE_CLASS (mode) == MODE_INT)
9235
          {
9236
            tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9237
            if (type_for_mode)
9238
              tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9239
          }
9240
        if (!tmp)
9241
          tmp = build_constructor_from_list (type,
9242
                                             TREE_VECTOR_CST_ELTS (exp));
9243
        return expand_expr (tmp, ignore ? const0_rtx : target,
9244
                            tmode, modifier);
9245
      }
9246
 
9247
    case CONST_DECL:
9248
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9249
 
9250
    case REAL_CST:
9251
      /* If optimized, generate immediate CONST_DOUBLE
9252
         which will be turned into memory by reload if necessary.
9253
 
9254
         We used to force a register so that loop.c could see it.  But
9255
         this does not allow gen_* patterns to perform optimizations with
9256
         the constants.  It also produces two insns in cases like "x = 1.0;".
9257
         On most machines, floating-point constants are not permitted in
9258
         many insns, so we'd end up copying it to a register in any case.
9259
 
9260
         Now, we do the copying in expand_binop, if appropriate.  */
9261
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9262
                                           TYPE_MODE (TREE_TYPE (exp)));
9263
 
9264
    case FIXED_CST:
9265
      return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9266
                                           TYPE_MODE (TREE_TYPE (exp)));
9267
 
9268
    case COMPLEX_CST:
9269
      /* Handle evaluating a complex constant in a CONCAT target.  */
9270
      if (original_target && GET_CODE (original_target) == CONCAT)
9271
        {
9272
          enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9273
          rtx rtarg, itarg;
9274
 
9275
          rtarg = XEXP (original_target, 0);
9276
          itarg = XEXP (original_target, 1);
9277
 
9278
          /* Move the real and imaginary parts separately.  */
9279
          op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9280
          op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9281
 
9282
          if (op0 != rtarg)
9283
            emit_move_insn (rtarg, op0);
9284
          if (op1 != itarg)
9285
            emit_move_insn (itarg, op1);
9286
 
9287
          return original_target;
9288
        }
9289
 
9290
      /* ... fall through ...  */
9291
 
9292
    case STRING_CST:
9293
      temp = expand_expr_constant (exp, 1, modifier);
9294
 
9295
      /* temp contains a constant address.
9296
         On RISC machines where a constant address isn't valid,
9297
         make some insns to get that address into a register.  */
9298
      if (modifier != EXPAND_CONST_ADDRESS
9299
          && modifier != EXPAND_INITIALIZER
9300
          && modifier != EXPAND_SUM
9301
          && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9302
                                            MEM_ADDR_SPACE (temp)))
9303
        return replace_equiv_address (temp,
9304
                                      copy_rtx (XEXP (temp, 0)));
9305
      return temp;
9306
 
9307
    case SAVE_EXPR:
9308
      {
9309
        tree val = treeop0;
9310
        rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
9311
 
9312
        if (!SAVE_EXPR_RESOLVED_P (exp))
9313
          {
9314
            /* We can indeed still hit this case, typically via builtin
9315
               expanders calling save_expr immediately before expanding
9316
               something.  Assume this means that we only have to deal
9317
               with non-BLKmode values.  */
9318
            gcc_assert (GET_MODE (ret) != BLKmode);
9319
 
9320
            val = build_decl (EXPR_LOCATION (exp),
9321
                              VAR_DECL, NULL, TREE_TYPE (exp));
9322
            DECL_ARTIFICIAL (val) = 1;
9323
            DECL_IGNORED_P (val) = 1;
9324
            treeop0 = val;
9325
            TREE_OPERAND (exp, 0) = treeop0;
9326
            SAVE_EXPR_RESOLVED_P (exp) = 1;
9327
 
9328
            if (!CONSTANT_P (ret))
9329
              ret = copy_to_reg (ret);
9330
            SET_DECL_RTL (val, ret);
9331
          }
9332
 
9333
        return ret;
9334
      }
9335
 
9336
 
9337
    case CONSTRUCTOR:
9338
      /* If we don't need the result, just ensure we evaluate any
9339
         subexpressions.  */
9340
      if (ignore)
9341
        {
9342
          unsigned HOST_WIDE_INT idx;
9343
          tree value;
9344
 
9345
          FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9346
            expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9347
 
9348
          return const0_rtx;
9349
        }
9350
 
9351
      return expand_constructor (exp, target, modifier, false);
9352
 
9353
    case TARGET_MEM_REF:
9354
      {
9355
        addr_space_t as
9356
          = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9357
        struct mem_address addr;
9358
        enum insn_code icode;
9359
        unsigned int align;
9360
 
9361
        get_address_description (exp, &addr);
9362
        op0 = addr_for_mem_ref (&addr, as, true);
9363
        op0 = memory_address_addr_space (mode, op0, as);
9364
        temp = gen_rtx_MEM (mode, op0);
9365
        set_mem_attributes (temp, exp, 0);
9366
        set_mem_addr_space (temp, as);
9367
        align = get_object_or_type_alignment (exp);
9368
        if (mode != BLKmode
9369
            && align < GET_MODE_ALIGNMENT (mode)
9370
            /* If the target does not have special handling for unaligned
9371
               loads of mode then it can use regular moves for them.  */
9372
            && ((icode = optab_handler (movmisalign_optab, mode))
9373
                != CODE_FOR_nothing))
9374
          {
9375
            struct expand_operand ops[2];
9376
 
9377
            /* We've already validated the memory, and we're creating a
9378
               new pseudo destination.  The predicates really can't fail,
9379
               nor can the generator.  */
9380
            create_output_operand (&ops[0], NULL_RTX, mode);
9381
            create_fixed_operand (&ops[1], temp);
9382
            expand_insn (icode, 2, ops);
9383
            return ops[0].value;
9384
          }
9385
        return temp;
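        /* Illustrative note: on a strict-alignment target, an SImode load
           from an address only known to be byte aligned cannot use a plain
           (mem:SI ...) move; when movmisalign_optab has a handler for the
           mode, the code above routes the load through that pattern into a
           fresh pseudo instead.  */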
9386
      }
9387
 
9388
    case MEM_REF:
9389
      {
9390
        addr_space_t as
9391
          = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9392
        enum machine_mode address_mode;
9393
        tree base = TREE_OPERAND (exp, 0);
9394
        gimple def_stmt;
9395
        enum insn_code icode;
9396
        unsigned align;
9397
        /* Handle expansion of non-aliased memory with non-BLKmode.  That
9398
           might end up in a register.  */
9399
        if (mem_ref_refers_to_non_mem_p (exp))
9400
          {
9401
            HOST_WIDE_INT offset = mem_ref_offset (exp).low;
9402
            tree bit_offset;
9403
            tree bftype;
9404
            base = TREE_OPERAND (base, 0);
9405
            if (offset == 0
9406
                && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
9407
                && (GET_MODE_BITSIZE (DECL_MODE (base))
9408
                    == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
9409
              return expand_expr (build1 (VIEW_CONVERT_EXPR,
9410
                                          TREE_TYPE (exp), base),
9411
                                  target, tmode, modifier);
9412
            bit_offset = bitsize_int (offset * BITS_PER_UNIT);
9413
            bftype = TREE_TYPE (base);
9414
            if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
9415
              bftype = TREE_TYPE (exp);
9416
            else
9417
              {
9418
                temp = assign_stack_temp (DECL_MODE (base),
9419
                                          GET_MODE_SIZE (DECL_MODE (base)),
9420
                                          0);
9421
                store_expr (base, temp, 0, false);
9422
                temp = adjust_address (temp, BLKmode, offset);
9423
                set_mem_size (temp, int_size_in_bytes (TREE_TYPE (exp)));
9424
                return temp;
9425
              }
9426
            return expand_expr (build3 (BIT_FIELD_REF, bftype,
9427
                                        base,
9428
                                        TYPE_SIZE (TREE_TYPE (exp)),
9429
                                        bit_offset),
9430
                                target, tmode, modifier);
9431
          }
9432
        address_mode = targetm.addr_space.address_mode (as);
9433
        base = TREE_OPERAND (exp, 0);
9434
        if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9435
          {
9436
            tree mask = gimple_assign_rhs2 (def_stmt);
9437
            base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9438
                           gimple_assign_rhs1 (def_stmt), mask);
9439
            TREE_OPERAND (exp, 0) = base;
9440
          }
9441
        align = get_object_or_type_alignment (exp);
9442
        op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9443
        op0 = memory_address_addr_space (address_mode, op0, as);
9444
        if (!integer_zerop (TREE_OPERAND (exp, 1)))
9445
          {
9446
            rtx off
9447
              = immed_double_int_const (mem_ref_offset (exp), address_mode);
9448
            op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9449
          }
9450
        op0 = memory_address_addr_space (mode, op0, as);
9451
        temp = gen_rtx_MEM (mode, op0);
9452
        set_mem_attributes (temp, exp, 0);
9453
        set_mem_addr_space (temp, as);
9454
        if (TREE_THIS_VOLATILE (exp))
9455
          MEM_VOLATILE_P (temp) = 1;
9456
        if (mode != BLKmode
9457
            && align < GET_MODE_ALIGNMENT (mode)
9458
            /* If the target does not have special handling for unaligned
9459
               loads of mode then it can use regular moves for them.  */
9460
            && ((icode = optab_handler (movmisalign_optab, mode))
9461
                != CODE_FOR_nothing))
9462
          {
9463
            struct expand_operand ops[2];
9464
 
9465
            /* We've already validated the memory, and we're creating a
9466
               new pseudo destination.  The predicates really can't fail,
9467
               nor can the generator.  */
9468
            create_output_operand (&ops[0], NULL_RTX, mode);
9469
            create_fixed_operand (&ops[1], temp);
9470
            expand_insn (icode, 2, ops);
9471
            return ops[0].value;
9472
          }
9473
        return temp;
9474
      }
9475
 
9476
    case ARRAY_REF:
9477
 
9478
      {
9479
        tree array = treeop0;
9480
        tree index = treeop1;
9481
 
9482
        /* Fold an expression like: "foo"[2].
9483
           This is not done in fold so it won't happen inside &.
9484
           Don't fold if this is for wide characters since it's too
9485
           difficult to do correctly and this is a very rare case.  */
9486
 
9487
        if (modifier != EXPAND_CONST_ADDRESS
9488
            && modifier != EXPAND_INITIALIZER
9489
            && modifier != EXPAND_MEMORY)
9490
          {
9491
            tree t = fold_read_from_constant_string (exp);
9492
 
9493
            if (t)
9494
              return expand_expr (t, target, tmode, modifier);
9495
          }
9496
 
9497
        /* If this is a constant index into a constant array,
9498
           just get the value from the array.  Handle both the cases when
9499
           we have an explicit constructor and when our operand is a variable
9500
           that was declared const.  */
9501
 
9502
        if (modifier != EXPAND_CONST_ADDRESS
9503
            && modifier != EXPAND_INITIALIZER
9504
            && modifier != EXPAND_MEMORY
9505
            && TREE_CODE (array) == CONSTRUCTOR
9506
            && ! TREE_SIDE_EFFECTS (array)
9507
            && TREE_CODE (index) == INTEGER_CST)
9508
          {
9509
            unsigned HOST_WIDE_INT ix;
9510
            tree field, value;
9511
 
9512
            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9513
                                      field, value)
9514
              if (tree_int_cst_equal (field, index))
9515
                {
9516
                  if (!TREE_SIDE_EFFECTS (value))
9517
                    return expand_expr (fold (value), target, tmode, modifier);
9518
                  break;
9519
                }
9520
          }
9521
 
9522
        else if (optimize >= 1
9523
                 && modifier != EXPAND_CONST_ADDRESS
9524
                 && modifier != EXPAND_INITIALIZER
9525
                 && modifier != EXPAND_MEMORY
9526
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9527
                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
9528
                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
9529
                 && const_value_known_p (array))
9530
          {
9531
            if (TREE_CODE (index) == INTEGER_CST)
9532
              {
9533
                tree init = DECL_INITIAL (array);
9534
 
9535
                if (TREE_CODE (init) == CONSTRUCTOR)
9536
                  {
9537
                    unsigned HOST_WIDE_INT ix;
9538
                    tree field, value;
9539
 
9540
                    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9541
                                              field, value)
9542
                      if (tree_int_cst_equal (field, index))
9543
                        {
9544
                          if (TREE_SIDE_EFFECTS (value))
9545
                            break;
9546
 
9547
                          if (TREE_CODE (value) == CONSTRUCTOR)
9548
                            {
9549
                              /* If VALUE is a CONSTRUCTOR, this
9550
                                 optimization is only useful if
9551
                                 this doesn't store the CONSTRUCTOR
9552
                                 into memory.  If it does, it is more
9553
                                 efficient to just load the data from
9554
                                 the array directly.  */
9555
                              rtx ret = expand_constructor (value, target,
9556
                                                            modifier, true);
9557
                              if (ret == NULL_RTX)
9558
                                break;
9559
                            }
9560
 
9561
                          return expand_expr (fold (value), target, tmode,
9562
                                              modifier);
9563
                        }
9564
                  }
9565
                else if (TREE_CODE (init) == STRING_CST)
9566
                  {
9567
                    tree index1 = index;
9568
                    tree low_bound = array_ref_low_bound (exp);
9569
                    index1 = fold_convert_loc (loc, sizetype,
9570
                                               treeop1);
9571
 
9572
                    /* Optimize the special-case of a zero lower bound.
9573
 
9574
                       We convert the low_bound to sizetype to avoid some problems
9575
                       with constant folding.  (E.g. suppose the lower bound is 1,
9576
                       and its mode is QI.  Without the conversion,l (ARRAY
9577
                       +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
9578
                       +INDEX), which becomes (ARRAY+255+INDEX).  Opps!)  */
9579
 
9580
                    if (! integer_zerop (low_bound))
9581
                      index1 = size_diffop_loc (loc, index1,
9582
                                            fold_convert_loc (loc, sizetype,
9583
                                                              low_bound));
9584
 
9585
                    if (0 > compare_tree_int (index1,
9586
                                              TREE_STRING_LENGTH (init)))
9587
                      {
9588
                        tree type = TREE_TYPE (TREE_TYPE (init));
9589
                        enum machine_mode mode = TYPE_MODE (type);
9590
 
9591
                        if (GET_MODE_CLASS (mode) == MODE_INT
9592
                            && GET_MODE_SIZE (mode) == 1)
9593
                          return gen_int_mode (TREE_STRING_POINTER (init)
9594
                                               [TREE_INT_CST_LOW (index1)],
9595
                                               mode);
9596
                      }
9597
                  }
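                /* For example, given static const char greet[] = "hi";
                   a read of greet[1] with the index provably in range folds
                   here straight to (const_int 105), the character 'i', with
                   no memory load emitted.  */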
9598
              }
9599
          }
9600
      }
9601
      goto normal_inner_ref;
9602
 
9603
    case COMPONENT_REF:
9604
      /* If the operand is a CONSTRUCTOR, we can just extract the
9605
         appropriate field if it is present.  */
9606
      if (TREE_CODE (treeop0) == CONSTRUCTOR)
9607
        {
9608
          unsigned HOST_WIDE_INT idx;
9609
          tree field, value;
9610
 
9611
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9612
                                    idx, field, value)
9613
            if (field == treeop1
9614
                /* We can normally use the value of the field in the
9615
                   CONSTRUCTOR.  However, if this is a bitfield in
9616
                   an integral mode that we can fit in a HOST_WIDE_INT,
9617
                   we must mask only the number of bits in the bitfield,
9618
                   since this is done implicitly by the constructor.  If
9619
                   the bitfield does not meet either of those conditions,
9620
                   we can't do this optimization.  */
9621
                && (! DECL_BIT_FIELD (field)
9622
                    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9623
                        && (GET_MODE_PRECISION (DECL_MODE (field))
9624
                            <= HOST_BITS_PER_WIDE_INT))))
9625
              {
9626
                if (DECL_BIT_FIELD (field)
9627
                    && modifier == EXPAND_STACK_PARM)
9628
                  target = 0;
9629
                op0 = expand_expr (value, target, tmode, modifier);
9630
                if (DECL_BIT_FIELD (field))
9631
                  {
9632
                    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9633
                    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9634
 
9635
                    if (TYPE_UNSIGNED (TREE_TYPE (field)))
9636
                      {
9637
                        op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
9638
                        op0 = expand_and (imode, op0, op1, target);
9639
                      }
9640
                    else
9641
                      {
9642
                        int count = GET_MODE_PRECISION (imode) - bitsize;
9643
 
9644
                        op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9645
                                            target, 0);
9646
                        op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9647
                                            target, 0);
9648
                      }
9649
                  }
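                    /* As an illustration, reading an unsigned 5-bit field
                       whose CONSTRUCTOR entry holds 0x25 masks it with
                       (1 << 5) - 1 = 0x1f, yielding 0x05; a signed field is
                       instead shifted left and then arithmetically right by
                       (precision - 5) bits so its top bit is sign-extended.  */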
9650
 
9651
                return op0;
9652
              }
9653
        }
9654
      goto normal_inner_ref;
9655
 
9656
    case BIT_FIELD_REF:
9657
    case ARRAY_RANGE_REF:
9658
    normal_inner_ref:
9659
      {
9660
        enum machine_mode mode1, mode2;
9661
        HOST_WIDE_INT bitsize, bitpos;
9662
        tree offset;
9663
        int volatilep = 0, must_force_mem;
9664
        bool packedp = false;
9665
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9666
                                        &mode1, &unsignedp, &volatilep, true);
9667
        rtx orig_op0, memloc;
9668
 
9669
        /* If we got back the original object, something is wrong.  Perhaps
9670
           we are evaluating an expression too early.  In any event, don't
9671
           infinitely recurse.  */
9672
        gcc_assert (tem != exp);
9673
 
9674
        if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
9675
            || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
9676
                && DECL_PACKED (TREE_OPERAND (exp, 1))))
9677
          packedp = true;
9678
 
9679
        /* If TEM's type is a union of variable size, pass TARGET to the inner
9680
           computation, since it will need a temporary and TARGET is known
9681
           to have to do.  This occurs in unchecked conversion in Ada.  */
9682
        orig_op0 = op0
9683
          = expand_expr (tem,
9684
                         (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9685
                          && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9686
                              != INTEGER_CST)
9687
                          && modifier != EXPAND_STACK_PARM
9688
                          ? target : NULL_RTX),
9689
                         VOIDmode,
9690
                         (modifier == EXPAND_INITIALIZER
9691
                          || modifier == EXPAND_CONST_ADDRESS
9692
                          || modifier == EXPAND_STACK_PARM)
9693
                         ? modifier : EXPAND_NORMAL);
9694
 
9695
 
9696
        /* If the bitfield is volatile, we want to access it in the
9697
           field's mode, not the computed mode.
9698
           If a MEM has VOIDmode (external with incomplete type),
9699
           use BLKmode for it instead.  */
9700
        if (MEM_P (op0))
9701
          {
9702
            if (volatilep && flag_strict_volatile_bitfields > 0)
9703
              op0 = adjust_address (op0, mode1, 0);
9704
            else if (GET_MODE (op0) == VOIDmode)
9705
              op0 = adjust_address (op0, BLKmode, 0);
9706
          }
9707
 
9708
        mode2
9709
          = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9710
 
9711
        /* If we have either an offset, a BLKmode result, or a reference
9712
           outside the underlying object, we must force it to memory.
9713
           Such a case can occur in Ada if we have unchecked conversion
9714
           of an expression from a scalar type to an aggregate type or
9715
           for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9716
           passed a partially uninitialized object or a view-conversion
9717
           to a larger size.  */
9718
        must_force_mem = (offset
9719
                          || mode1 == BLKmode
9720
                          || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9721
 
9722
        /* Handle CONCAT first.  */
9723
        if (GET_CODE (op0) == CONCAT && !must_force_mem)
9724
          {
9725
            if (bitpos == 0
9726
                && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9727
              return op0;
9728
            if (bitpos == 0
9729
                && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9730
                && bitsize)
9731
              {
9732
                op0 = XEXP (op0, 0);
9733
                mode2 = GET_MODE (op0);
9734
              }
9735
            else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9736
                     && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9737
                     && bitpos
9738
                     && bitsize)
9739
              {
9740
                op0 = XEXP (op0, 1);
9741
                bitpos = 0;
9742
                mode2 = GET_MODE (op0);
9743
              }
9744
            else
9745
              /* Otherwise force into memory.  */
9746
              must_force_mem = 1;
9747
          }
9748
 
9749
        /* If this is a constant, put it in a register if it is a legitimate
9750
           constant and we don't need a memory reference.  */
9751
        if (CONSTANT_P (op0)
9752
            && mode2 != BLKmode
9753
            && targetm.legitimate_constant_p (mode2, op0)
9754
            && !must_force_mem)
9755
          op0 = force_reg (mode2, op0);
9756
 
9757
        /* Otherwise, if this is a constant, try to force it to the constant
9758
           pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
9759
           is a legitimate constant.  */
9760
        else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9761
          op0 = validize_mem (memloc);
9762
 
9763
        /* Otherwise, if this is a constant or the object is not in memory
9764
           and needs to be, put it there.  */
9765
        else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9766
          {
9767
            tree nt = build_qualified_type (TREE_TYPE (tem),
9768
                                            (TYPE_QUALS (TREE_TYPE (tem))
9769
                                             | TYPE_QUAL_CONST));
9770
            memloc = assign_temp (nt, 1, 1, 1);
9771
            emit_move_insn (memloc, op0);
9772
            op0 = memloc;
9773
          }
9774
 
9775
        if (offset)
9776
          {
9777
            enum machine_mode address_mode;
9778
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9779
                                          EXPAND_SUM);
9780
 
9781
            gcc_assert (MEM_P (op0));
9782
 
9783
            address_mode
9784
              = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
9785
            if (GET_MODE (offset_rtx) != address_mode)
9786
              offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9787
 
9788
            if (GET_MODE (op0) == BLKmode
9789
                /* A constant address in OP0 can have VOIDmode, we must
9790
                   not try to call force_reg in that case.  */
9791
                && GET_MODE (XEXP (op0, 0)) != VOIDmode
9792
                && bitsize != 0
9793
                && (bitpos % bitsize) == 0
9794
                && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9795
                && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9796
              {
9797
                op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9798
                bitpos = 0;
9799
              }
9800
 
9801
            op0 = offset_address (op0, offset_rtx,
9802
                                  highest_pow2_factor (offset));
9803
          }
9804
 
9805
        /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9806
           record its alignment as BIGGEST_ALIGNMENT.  */
9807
        if (MEM_P (op0) && bitpos == 0 && offset != 0
9808
            && is_aligning_offset (offset, tem))
9809
          set_mem_align (op0, BIGGEST_ALIGNMENT);
9810
 
9811
        /* Don't forget about volatility even if this is a bitfield.  */
9812
        if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9813
          {
9814
            if (op0 == orig_op0)
9815
              op0 = copy_rtx (op0);
9816
 
9817
            MEM_VOLATILE_P (op0) = 1;
9818
          }
9819
 
9820
        /* In cases where an aligned union has an unaligned object
9821
           as a field, we might be extracting a BLKmode value from
9822
           an integer-mode (e.g., SImode) object.  Handle this case
9823
           by doing the extract into an object as wide as the field
9824
           (which we know to be the width of a basic mode), then
9825
           storing into memory, and changing the mode to BLKmode.  */
9826
        if (mode1 == VOIDmode
9827
            || REG_P (op0) || GET_CODE (op0) == SUBREG
9828
            || (mode1 != BLKmode && ! direct_load[(int) mode1]
9829
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9830
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9831
                && modifier != EXPAND_CONST_ADDRESS
9832
                && modifier != EXPAND_INITIALIZER)
9833
            /* If the field is volatile, we always want an aligned
9834
               access.  Do this in the following two situations:
9835
               1. the access is not already naturally aligned; otherwise
9836
               "normal" (non-bitfield) volatile fields would become
9837
               non-addressable.
9838
               2. the bitsize is narrower than the access size, so we
9839
               need to extract bitfields from the access.  */
9840
            || (volatilep && flag_strict_volatile_bitfields > 0
9841
                && (bitpos % GET_MODE_ALIGNMENT (mode) != 0
9842
                    || (mode1 != BLKmode
9843
                        && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
9844
            /* If the field isn't aligned enough to fetch as a memref,
9845
               fetch it as a bit field.  */
9846
            || (mode1 != BLKmode
9847
                && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9848
                      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9849
                      || (MEM_P (op0)
9850
                          && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9851
                              || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9852
                     && ((modifier == EXPAND_CONST_ADDRESS
9853
                          || modifier == EXPAND_INITIALIZER)
9854
                         ? STRICT_ALIGNMENT
9855
                         : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9856
                    || (bitpos % BITS_PER_UNIT != 0)))
9857
            /* If the type and the field are a constant size and the
9858
               size of the type isn't the same size as the bitfield,
9859
               we must use bitfield operations.  */
9860
            || (bitsize >= 0
9861
                && TYPE_SIZE (TREE_TYPE (exp))
9862
                && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9863
                && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9864
                                          bitsize)))
9865
          {
9866
            enum machine_mode ext_mode = mode;
9867
 
9868
            if (ext_mode == BLKmode
9869
                && ! (target != 0 && MEM_P (op0)
9870
                      && MEM_P (target)
9871
                      && bitpos % BITS_PER_UNIT == 0))
9872
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9873
 
9874
            if (ext_mode == BLKmode)
9875
              {
9876
                if (target == 0)
9877
                  target = assign_temp (type, 0, 1, 1);
9878
 
9879
                if (bitsize == 0)
9880
                  return target;
9881
 
9882
                /* In this case, BITPOS must start at a byte boundary and
9883
                   TARGET, if specified, must be a MEM.  */
9884
                gcc_assert (MEM_P (op0)
9885
                            && (!target || MEM_P (target))
9886
                            && !(bitpos % BITS_PER_UNIT));
9887
 
9888
                emit_block_move (target,
9889
                                 adjust_address (op0, VOIDmode,
9890
                                                 bitpos / BITS_PER_UNIT),
9891
                                 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9892
                                          / BITS_PER_UNIT),
9893
                                 (modifier == EXPAND_STACK_PARM
9894
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9895
 
9896
                return target;
9897
              }
9898
 
9899
            op0 = validize_mem (op0);
9900
 
9901
            if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9902
              mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9903
 
9904
            op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
9905
                                     (modifier == EXPAND_STACK_PARM
9906
                                      ? NULL_RTX : target),
9907
                                     ext_mode, ext_mode);
9908
 
9909
            /* If the result is a record type and BITSIZE is narrower than
9910
               the mode of OP0, an integral mode, and this is a big endian
9911
               machine, we must put the field into the high-order bits.  */
9912
            if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9913
                && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9914
                && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9915
              op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9916
                                  GET_MODE_BITSIZE (GET_MODE (op0))
9917
                                  - bitsize, op0, 1);
9918
 
9919
            /* If the result type is BLKmode, store the data into a temporary
9920
               of the appropriate type, but with the mode corresponding to the
9921
               mode for the data we have (op0's mode).  It's tempting to make
9922
               this a constant type, since we know it's only being stored once,
9923
               but that can cause problems if we are taking the address of this
9924
               COMPONENT_REF because the MEM of any reference via that address
9925
               will have flags corresponding to the type, which will not
9926
               necessarily be constant.  */
9927
            if (mode == BLKmode)
9928
              {
9929
                HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9930
                rtx new_rtx;
9931
 
9932
                /* If the reference doesn't use the alias set of its type,
9933
                   we cannot create the temporary using that type.  */
9934
                if (component_uses_parent_alias_set (exp))
9935
                  {
9936
                    new_rtx = assign_stack_local (ext_mode, size, 0);
9937
                    set_mem_alias_set (new_rtx, get_alias_set (exp));
9938
                  }
9939
                else
9940
                  new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9941
 
9942
                emit_move_insn (new_rtx, op0);
9943
                op0 = copy_rtx (new_rtx);
9944
                PUT_MODE (op0, BLKmode);
9945
                set_mem_attributes (op0, exp, 1);
9946
              }
9947
 
9948
            return op0;
9949
          }
9950
 
9951
        /* If the result is BLKmode, use that to access the object
9952
           now as well.  */
9953
        if (mode == BLKmode)
9954
          mode1 = BLKmode;
9955
 
9956
        /* Get a reference to just this component.  */
9957
        if (modifier == EXPAND_CONST_ADDRESS
9958
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9959
          op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9960
        else
9961
          op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9962
 
9963
        if (op0 == orig_op0)
9964
          op0 = copy_rtx (op0);
9965
 
9966
        set_mem_attributes (op0, exp, 0);
9967
        if (REG_P (XEXP (op0, 0)))
9968
          mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9969
 
9970
        MEM_VOLATILE_P (op0) |= volatilep;
9971
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9972
            || modifier == EXPAND_CONST_ADDRESS
9973
            || modifier == EXPAND_INITIALIZER)
9974
          return op0;
9975
        else if (target == 0)
9976
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9977
 
9978
        convert_move (target, op0, unsignedp);
9979
        return target;
9980
      }
9981
 
9982
    case OBJ_TYPE_REF:
9983
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9984
 
9985
    case CALL_EXPR:
9986
      /* All valid uses of __builtin_va_arg_pack () are removed during
9987
         inlining.  */
9988
      if (CALL_EXPR_VA_ARG_PACK (exp))
9989
        error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9990
      {
9991
        tree fndecl = get_callee_fndecl (exp), attr;
9992
 
9993
        if (fndecl
9994
            && (attr = lookup_attribute ("error",
9995
                                         DECL_ATTRIBUTES (fndecl))) != NULL)
9996
          error ("%Kcall to %qs declared with attribute error: %s",
9997
                 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9998
                 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9999
        if (fndecl
10000
            && (attr = lookup_attribute ("warning",
10001
                                         DECL_ATTRIBUTES (fndecl))) != NULL)
10002
          warning_at (tree_nonartificial_location (exp),
10003
                      0, "%Kcall to %qs declared with attribute warning: %s",
10004
                      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10005
                      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10006
 
10007
        /* Check for a built-in function.  */
10008
        if (fndecl && DECL_BUILT_IN (fndecl))
10009
          {
10010
            gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10011
            return expand_builtin (exp, target, subtarget, tmode, ignore);
10012
          }
10013
      }
10014
      return expand_call (exp, target, ignore);
10015
 
10016
    case VIEW_CONVERT_EXPR:
10017
      op0 = NULL_RTX;
10018
 
10019
      /* If we are converting to BLKmode, try to avoid an intermediate
10020
         temporary by fetching an inner memory reference.  */
10021
      if (mode == BLKmode
10022
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10023
          && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10024
          && handled_component_p (treeop0))
10025
      {
10026
        enum machine_mode mode1;
10027
        HOST_WIDE_INT bitsize, bitpos;
10028
        tree offset;
10029
        int unsignedp;
10030
        int volatilep = 0;
10031
        tree tem
10032
          = get_inner_reference (treeop0, &bitsize, &bitpos,
10033
                                 &offset, &mode1, &unsignedp, &volatilep,
10034
                                 true);
10035
        rtx orig_op0;
10036
 
10037
        /* ??? We should work harder and deal with non-zero offsets.  */
10038
        if (!offset
10039
            && (bitpos % BITS_PER_UNIT) == 0
10040
            && bitsize >= 0
10041
            && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
10042
          {
10043
            /* See the normal_inner_ref case for the rationale.  */
10044
            orig_op0
10045
              = expand_expr (tem,
10046
                             (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10047
                              && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10048
                                  != INTEGER_CST)
10049
                              && modifier != EXPAND_STACK_PARM
10050
                              ? target : NULL_RTX),
10051
                             VOIDmode,
10052
                             (modifier == EXPAND_INITIALIZER
10053
                              || modifier == EXPAND_CONST_ADDRESS
10054
                              || modifier == EXPAND_STACK_PARM)
10055
                             ? modifier : EXPAND_NORMAL);
10056
 
10057
            if (MEM_P (orig_op0))
10058
              {
10059
                op0 = orig_op0;
10060
 
10061
                /* Get a reference to just this component.  */
10062
                if (modifier == EXPAND_CONST_ADDRESS
10063
                    || modifier == EXPAND_SUM
10064
                    || modifier == EXPAND_INITIALIZER)
10065
                  op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10066
                else
10067
                  op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10068
 
10069
                if (op0 == orig_op0)
10070
                  op0 = copy_rtx (op0);
10071
 
10072
                set_mem_attributes (op0, treeop0, 0);
10073
                if (REG_P (XEXP (op0, 0)))
10074
                  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10075
 
10076
                MEM_VOLATILE_P (op0) |= volatilep;
10077
              }
10078
          }
10079
      }
10080
 
10081
      if (!op0)
10082
        op0 = expand_expr (treeop0,
10083
                           NULL_RTX, VOIDmode, modifier);
10084
 
10085
      /* If the input and output modes are both the same, we are done.  */
10086
      if (mode == GET_MODE (op0))
10087
        ;
10088
      /* If neither mode is BLKmode and both modes are the same size,
10089
         then we can use gen_lowpart.  */
10090
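      /* E.g. a VIEW_CONVERT_EXPR that reinterprets a 32-bit float as a
         32-bit integer takes this path and typically reduces to a
         lowpart subreg.  */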
      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10091
               && (GET_MODE_PRECISION (mode)
10092
                   == GET_MODE_PRECISION (GET_MODE (op0)))
10093
               && !COMPLEX_MODE_P (GET_MODE (op0)))
10094
        {
10095
          if (GET_CODE (op0) == SUBREG)
10096
            op0 = force_reg (GET_MODE (op0), op0);
10097
          temp = gen_lowpart_common (mode, op0);
10098
          if (temp)
10099
            op0 = temp;
10100
          else
10101
            {
10102
              if (!REG_P (op0) && !MEM_P (op0))
10103
                op0 = force_reg (GET_MODE (op0), op0);
10104
              op0 = gen_lowpart (mode, op0);
10105
            }
10106
        }
10107
      /* If both types are integral, convert from one mode to the other.  */
10108
      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10109
        op0 = convert_modes (mode, GET_MODE (op0), op0,
10110
                             TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10111
      /* As a last resort, spill op0 to memory, and reload it in a
10112
         different mode.  */
10113
      else if (!MEM_P (op0))
10114
        {
10115
          /* If the operand is not a MEM, force it into memory.  Since we
10116
             are going to be changing the mode of the MEM, don't call
10117
             force_const_mem for constants because we don't allow pool
10118
             constants to change mode.  */
10119
          tree inner_type = TREE_TYPE (treeop0);
10120
 
10121
          gcc_assert (!TREE_ADDRESSABLE (exp));
10122
 
10123
          if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10124
            target
10125
              = assign_stack_temp_for_type
10126
                (TYPE_MODE (inner_type),
10127
                 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
10128
 
10129
          emit_move_insn (target, op0);
10130
          op0 = target;
10131
        }
10132
 
10133
      /* At this point, OP0 is in the correct mode.  If the output type is
10134
         such that the operand is known to be aligned, indicate that it is.
10135
         Otherwise, we need only be concerned about alignment for non-BLKmode
10136
         results.  */
10137
      if (MEM_P (op0))
10138
        {
10139
          enum insn_code icode;
10140
 
10141
          op0 = copy_rtx (op0);
10142
 
10143
          if (TYPE_ALIGN_OK (type))
10144
            set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10145
          else if (mode != BLKmode
10146
                   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
10147
                   /* If the target does have special handling for unaligned
10148
                      loads of this mode, then use them.  */
10149
                   && ((icode = optab_handler (movmisalign_optab, mode))
10150
                       != CODE_FOR_nothing))
10151
            {
10152
              rtx reg, insn;
10153
 
10154
              op0 = adjust_address (op0, mode, 0);
10155
              /* We've already validated the memory, and we're creating a
10156
                 new pseudo destination.  The predicates really can't
10157
                 fail.  */
10158
              reg = gen_reg_rtx (mode);
10159
 
10160
              /* Nor can the insn generator.  */
10161
              insn = GEN_FCN (icode) (reg, op0);
10162
              emit_insn (insn);
10163
              return reg;
10164
            }
10165
          else if (STRICT_ALIGNMENT
10166
                   && mode != BLKmode
10167
                   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10168
            {
10169
              tree inner_type = TREE_TYPE (treeop0);
10170
              HOST_WIDE_INT temp_size
10171
                = MAX (int_size_in_bytes (inner_type),
10172
                       (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10173
              rtx new_rtx
10174
                = assign_stack_temp_for_type (mode, temp_size, 0, type);
10175
              rtx new_with_op0_mode
10176
                = adjust_address (new_rtx, GET_MODE (op0), 0);
10177
 
10178
              gcc_assert (!TREE_ADDRESSABLE (exp));
10179
 
10180
              if (GET_MODE (op0) == BLKmode)
10181
                emit_block_move (new_with_op0_mode, op0,
10182
                                 GEN_INT (GET_MODE_SIZE (mode)),
10183
                                 (modifier == EXPAND_STACK_PARM
10184
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10185
              else
10186
                emit_move_insn (new_with_op0_mode, op0);
10187
 
10188
              op0 = new_rtx;
10189
            }
10190
 
10191
          op0 = adjust_address (op0, mode, 0);
10192
        }
10193
 
10194
      return op0;
10195
 
10196
    case MODIFY_EXPR:
10197
      {
10198
        tree lhs = treeop0;
10199
        tree rhs = treeop1;
10200
        gcc_assert (ignore);
10201
 
10202
        /* Check for |= or &= of a bitfield of size one into another bitfield
10203
           of size 1.  In this case, (unless we need the result of the
10204
           assignment) we can do this more efficiently with a
10205
           test followed by an assignment, if necessary.
10206
 
10207
           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
10208
           things change so we do, this code should be enhanced to
10209
           support it.  */
10210
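        /* E.g. for one-bit fields, "s.a |= s.b" becomes
           "if (s.b) s.a = 1;" and "s.a &= s.b" becomes
           "if (!s.b) s.a = 0;".  */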
        if (TREE_CODE (lhs) == COMPONENT_REF
10211
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
10212
                || TREE_CODE (rhs) == BIT_AND_EXPR)
10213
            && TREE_OPERAND (rhs, 0) == lhs
10214
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10215
            && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10216
            && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10217
          {
10218
            rtx label = gen_label_rtx ();
10219
            int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10220
            do_jump (TREE_OPERAND (rhs, 1),
10221
                     value ? label : 0,
10222
                     value ? 0 : label, -1);
10223
            expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10224
                               MOVE_NONTEMPORAL (exp));
10225
            do_pending_stack_adjust ();
10226
            emit_label (label);
10227
            return const0_rtx;
10228
          }
10229
 
10230
        expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
10231
        return const0_rtx;
10232
      }
10233
 
10234
    case ADDR_EXPR:
10235
      return expand_expr_addr_expr (exp, target, tmode, modifier);
10236
 
10237
    case REALPART_EXPR:
10238
      op0 = expand_normal (treeop0);
10239
      return read_complex_part (op0, false);
10240
 
10241
    case IMAGPART_EXPR:
10242
      op0 = expand_normal (treeop0);
10243
      return read_complex_part (op0, true);
10244
 
10245
    case RETURN_EXPR:
10246
    case LABEL_EXPR:
10247
    case GOTO_EXPR:
10248
    case SWITCH_EXPR:
10249
    case ASM_EXPR:
10250
      /* Expanded in cfgexpand.c.  */
10251
      gcc_unreachable ();
10252
 
10253
    case TRY_CATCH_EXPR:
10254
    case CATCH_EXPR:
10255
    case EH_FILTER_EXPR:
10256
    case TRY_FINALLY_EXPR:
10257
      /* Lowered by tree-eh.c.  */
10258
      gcc_unreachable ();
10259
 
10260
    case WITH_CLEANUP_EXPR:
10261
    case CLEANUP_POINT_EXPR:
10262
    case TARGET_EXPR:
10263
    case CASE_LABEL_EXPR:
10264
    case VA_ARG_EXPR:
10265
    case BIND_EXPR:
10266
    case INIT_EXPR:
10267
    case CONJ_EXPR:
10268
    case COMPOUND_EXPR:
10269
    case PREINCREMENT_EXPR:
10270
    case PREDECREMENT_EXPR:
10271
    case POSTINCREMENT_EXPR:
10272
    case POSTDECREMENT_EXPR:
10273
    case LOOP_EXPR:
10274
    case EXIT_EXPR:
10275
      /* Lowered by gimplify.c.  */
10276
      gcc_unreachable ();
10277
 
10278
    case FDESC_EXPR:
10279
      /* Function descriptors are not valid except as
10280
         initialization constants, and should not be expanded.  */
10281
      gcc_unreachable ();
10282
 
10283
    case WITH_SIZE_EXPR:
10284
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
10285
         have pulled out the size to use in whatever context it needed.  */
10286
      return expand_expr_real (treeop0, original_target, tmode,
10287
                               modifier, alt_rtl);
10288
 
10289
    case COMPOUND_LITERAL_EXPR:
10290
      {
10291
        /* Initialize the anonymous variable declared in the compound
10292
           literal, then return the variable.  */
10293
        tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
10294
 
10295
        /* Create RTL for this variable.  */
10296
        if (!DECL_RTL_SET_P (decl))
10297
          {
10298
            if (DECL_HARD_REGISTER (decl))
10299
              /* The user specified an assembler name for this variable.
10300
                 Set that up now.  */
10301
              rest_of_decl_compilation (decl, 0, 0);
10302
            else
10303
              expand_decl (decl);
10304
          }
10305
 
10306
        return expand_expr_real (decl, original_target, tmode,
10307
                                 modifier, alt_rtl);
10308
      }
10309
 
10310
    default:
10311
      return expand_expr_real_2 (&ops, target, tmode, modifier);
10312
    }
10313
}
10314
 
10315
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
10316
   signedness of TYPE), possibly returning the result in TARGET.  */
10317
static rtx
10318
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10319
{
10320
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
10321
  if (target && GET_MODE (target) != GET_MODE (exp))
10322
    target = 0;
10323
  /* For constant values, reduce using build_int_cst_type. */
10324
  if (CONST_INT_P (exp))
10325
    {
10326
      HOST_WIDE_INT value = INTVAL (exp);
10327
      tree t = build_int_cst_type (type, value);
10328
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10329
    }
10330
  else if (TYPE_UNSIGNED (type))
10331
    {
10332
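      /* For unsigned types, masking is enough: double_int_mask (prec)
         builds a mask of the low PREC bits, e.g. 0x7 for prec == 3.  */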
      rtx mask = immed_double_int_const (double_int_mask (prec),
10333
                                         GET_MODE (exp));
10334
      return expand_and (GET_MODE (exp), exp, mask, target);
10335
    }
10336
  else
10337
    {
10338
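      /* Sign-extend by shifting the field up to the top of the mode and
         back down arithmetically.  E.g. for a 3-bit signed field in a
         32-bit mode, count == 29, so the bit pattern 0b101 (-3) is moved
         up to the sign bit and comes back as -3 rather than 5.  */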
      int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10339
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10340
                          exp, count, target, 0);
10341
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10342
                           exp, count, target, 0);
10343
    }
10344
}
10345
 
10346
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10347
   when applied to the address of EXP produces an address known to be
10348
   aligned more than BIGGEST_ALIGNMENT.  */
10349
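/* In practice OFFSET has the shape (-(sizetype) &EXP) & MASK, possibly
   wrapped in conversions, where MASK is larger than BIGGEST_ALIGNMENT
   in bytes and MASK + 1 is a power of two; adding such an offset rounds
   the address of EXP up to a multiple of MASK + 1.  */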
 
10350
static int
10351
is_aligning_offset (const_tree offset, const_tree exp)
10352
{
10353
  /* Strip off any conversions.  */
10354
  while (CONVERT_EXPR_P (offset))
10355
    offset = TREE_OPERAND (offset, 0);
10356
 
10357
  /* We must now have a BIT_AND_EXPR with a constant that is one less than
10358
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
10359
  if (TREE_CODE (offset) != BIT_AND_EXPR
10360
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
10361
      || compare_tree_int (TREE_OPERAND (offset, 1),
10362
                           BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10363
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
10364
    return 0;
10365
 
10366
  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10367
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
10368
  offset = TREE_OPERAND (offset, 0);
10369
  while (CONVERT_EXPR_P (offset))
10370
    offset = TREE_OPERAND (offset, 0);
10371
 
10372
  if (TREE_CODE (offset) != NEGATE_EXPR)
10373
    return 0;
10374
 
10375
  offset = TREE_OPERAND (offset, 0);
10376
  while (CONVERT_EXPR_P (offset))
10377
    offset = TREE_OPERAND (offset, 0);
10378
 
10379
  /* This must now be the address of EXP.  */
10380
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10381
}
10382
 
10383
/* Return the tree node if ARG corresponds to a string constant, or zero
10384
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
10385
   in bytes within the string that ARG is accessing.  The type of the
10386
   offset will be `sizetype'.  */
10387
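/* Typical forms handled below are &"abc", &"abc"[3], &VAR and &VAR[3],
   where VAR is a variable whose initializer is a string literal, and
   PLUS/POINTER_PLUS expressions of such an address and an offset.  */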
 
10388
tree
10389
string_constant (tree arg, tree *ptr_offset)
10390
{
10391
  tree array, offset, lower_bound;
10392
  STRIP_NOPS (arg);
10393
 
10394
  if (TREE_CODE (arg) == ADDR_EXPR)
10395
    {
10396
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10397
        {
10398
          *ptr_offset = size_zero_node;
10399
          return TREE_OPERAND (arg, 0);
10400
        }
10401
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10402
        {
10403
          array = TREE_OPERAND (arg, 0);
10404
          offset = size_zero_node;
10405
        }
10406
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10407
        {
10408
          array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10409
          offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10410
          if (TREE_CODE (array) != STRING_CST
10411
              && TREE_CODE (array) != VAR_DECL)
10412
            return 0;
10413
 
10414
          /* Check if the array has a nonzero lower bound.  */
10415
          lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10416
          if (!integer_zerop (lower_bound))
10417
            {
10418
              /* If the offset and base aren't both constants, return 0.  */
10419
              if (TREE_CODE (lower_bound) != INTEGER_CST)
10420
                return 0;
10421
              if (TREE_CODE (offset) != INTEGER_CST)
10422
                return 0;
10423
              /* Adjust offset by the lower bound.  */
10424
              offset = size_diffop (fold_convert (sizetype, offset),
10425
                                    fold_convert (sizetype, lower_bound));
10426
            }
10427
        }
10428
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10429
        {
10430
          array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10431
          offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10432
          if (TREE_CODE (array) != ADDR_EXPR)
10433
            return 0;
10434
          array = TREE_OPERAND (array, 0);
10435
          if (TREE_CODE (array) != STRING_CST
10436
              && TREE_CODE (array) != VAR_DECL)
10437
            return 0;
10438
        }
10439
      else
10440
        return 0;
10441
    }
10442
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10443
    {
10444
      tree arg0 = TREE_OPERAND (arg, 0);
10445
      tree arg1 = TREE_OPERAND (arg, 1);
10446
 
10447
      STRIP_NOPS (arg0);
10448
      STRIP_NOPS (arg1);
10449
 
10450
      if (TREE_CODE (arg0) == ADDR_EXPR
10451
          && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10452
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10453
        {
10454
          array = TREE_OPERAND (arg0, 0);
10455
          offset = arg1;
10456
        }
10457
      else if (TREE_CODE (arg1) == ADDR_EXPR
10458
               && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10459
                   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10460
        {
10461
          array = TREE_OPERAND (arg1, 0);
10462
          offset = arg0;
10463
        }
10464
      else
10465
        return 0;
10466
    }
10467
  else
10468
    return 0;
10469
 
10470
  if (TREE_CODE (array) == STRING_CST)
10471
    {
10472
      *ptr_offset = fold_convert (sizetype, offset);
10473
      return array;
10474
    }
10475
  else if (TREE_CODE (array) == VAR_DECL
10476
           || TREE_CODE (array) == CONST_DECL)
10477
    {
10478
      int length;
10479
 
10480
      /* Variables initialized to string literals can be handled too.  */
10481
      if (!const_value_known_p (array)
10482
          || !DECL_INITIAL (array)
10483
          || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
10484
        return 0;
10485
 
10486
      /* Avoid const char foo[4] = "abcde";  */
10487
      if (DECL_SIZE_UNIT (array) == NULL_TREE
10488
          || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10489
          || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
10490
          || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10491
        return 0;
10492
 
10493
      /* If variable is bigger than the string literal, OFFSET must be constant
10494
         and inside of the bounds of the string literal.  */
10495
      offset = fold_convert (sizetype, offset);
10496
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10497
          && (! host_integerp (offset, 1)
10498
              || compare_tree_int (offset, length) >= 0))
10499
        return 0;
10500
 
10501
      *ptr_offset = offset;
10502
      return DECL_INITIAL (array);
10503
    }
10504
 
10505
  return 0;
10506
}
10507
 
10508
/* Generate code to calculate the expression described by OPS, an
10509
   exploded comparison, using a store-flag instruction, and return an
10510
   rtx for the result.
10511
 
10512
   If TARGET is nonzero, store the result there if convenient.
10513
 
10514
   Return zero if there is no suitable set-flag instruction
10515
   available on this machine.
10516
 
10517
   Once expand_expr has been called on the arguments of the comparison,
10518
   we are committed to doing the store flag, since it is not safe to
10519
   re-evaluate the expression.  We emit the store-flag insn by calling
10520
   emit_store_flag, but only expand the arguments if we have a reason
10521
   to believe that emit_store_flag will be successful.  If we think that
10522
   it will, but it isn't, we have to simulate the store-flag with a
10523
   set/jump/set sequence.  */
10524
 
10525
static rtx
10526
do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10527
{
10528
  enum rtx_code code;
10529
  tree arg0, arg1, type;
10530
  tree tem;
10531
  enum machine_mode operand_mode;
10532
  int unsignedp;
10533
  rtx op0, op1;
10534
  rtx subtarget = target;
10535
  location_t loc = ops->location;
10536
 
10537
  arg0 = ops->op0;
10538
  arg1 = ops->op1;
10539
 
10540
  /* Don't crash if the comparison was erroneous.  */
10541
  if (arg0 == error_mark_node || arg1 == error_mark_node)
10542
    return const0_rtx;
10543
 
10544
  type = TREE_TYPE (arg0);
10545
  operand_mode = TYPE_MODE (type);
10546
  unsignedp = TYPE_UNSIGNED (type);
10547
 
10548
  /* We won't bother with BLKmode store-flag operations because it would mean
10549
     passing a lot of information to emit_store_flag.  */
10550
  if (operand_mode == BLKmode)
10551
    return 0;
10552
 
10553
  /* We won't bother with store-flag operations involving function pointers
10554
     when function pointers must be canonicalized before comparisons.  */
10555
#ifdef HAVE_canonicalize_funcptr_for_compare
10556
  if (HAVE_canonicalize_funcptr_for_compare
10557
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10558
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10559
               == FUNCTION_TYPE))
10560
          || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10561
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10562
                  == FUNCTION_TYPE))))
10563
    return 0;
10564
#endif
10565
 
10566
  STRIP_NOPS (arg0);
10567
  STRIP_NOPS (arg1);
10568
 
10569
  /* For vector typed comparisons emit code to generate the desired
10570
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
10571
     expander for this.  */
10572
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
10573
    {
10574
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10575
      tree if_true = constant_boolean_node (true, ops->type);
10576
      tree if_false = constant_boolean_node (false, ops->type);
10577
      return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10578
    }
 
10591
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
10592
     operation of some type.  Some comparisons against 1 and -1 can be
10593
     converted to comparisons with zero.  Do so here so that the tests
10594
     below will be aware that we have a comparison with zero.   These
10595
     tests will not catch constants in the first operand, but constants
10596
     are rarely passed as the first operand.  */
10597
 
10598
  switch (ops->code)
10599
    {
10600
    case EQ_EXPR:
10601
      code = EQ;
10602
      break;
10603
    case NE_EXPR:
10604
      code = NE;
10605
      break;
10606
    case LT_EXPR:
10607
      if (integer_onep (arg1))
10608
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10609
      else
10610
        code = unsignedp ? LTU : LT;
10611
      break;
10612
    case LE_EXPR:
10613
      if (! unsignedp && integer_all_onesp (arg1))
10614
        arg1 = integer_zero_node, code = LT;
10615
      else
10616
        code = unsignedp ? LEU : LE;
10617
      break;
10618
    case GT_EXPR:
10619
      if (! unsignedp && integer_all_onesp (arg1))
10620
        arg1 = integer_zero_node, code = GE;
10621
      else
10622
        code = unsignedp ? GTU : GT;
10623
      break;
10624
    case GE_EXPR:
10625
      if (integer_onep (arg1))
10626
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10627
      else
10628
        code = unsignedp ? GEU : GE;
10629
      break;
10630
 
10631
    case UNORDERED_EXPR:
10632
      code = UNORDERED;
10633
      break;
10634
    case ORDERED_EXPR:
10635
      code = ORDERED;
10636
      break;
10637
    case UNLT_EXPR:
10638
      code = UNLT;
10639
      break;
10640
    case UNLE_EXPR:
10641
      code = UNLE;
10642
      break;
10643
    case UNGT_EXPR:
10644
      code = UNGT;
10645
      break;
10646
    case UNGE_EXPR:
10647
      code = UNGE;
10648
      break;
10649
    case UNEQ_EXPR:
10650
      code = UNEQ;
10651
      break;
10652
    case LTGT_EXPR:
10653
      code = LTGT;
10654
      break;
10655
 
10656
    default:
10657
      gcc_unreachable ();
10658
    }
10659
 
10660
  /* Put a constant second.  */
10661
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10662
      || TREE_CODE (arg0) == FIXED_CST)
10663
    {
10664
      tem = arg0; arg0 = arg1; arg1 = tem;
10665
      code = swap_condition (code);
10666
    }
10667
 
10668
  /* If this is an equality or inequality test of a single bit, we can
10669
     do this by shifting the bit being tested to the low-order bit and
10670
     masking the result with the constant 1.  If the condition was EQ,
10671
     we xor it with 1.  This does not require an scc insn and is faster
10672
     than an scc insn even if we have it.
10673
 
10674
     The code to make this transformation was moved into fold_single_bit_test,
10675
     so we just call into the folder and expand its result.  */
10676
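  /* E.g. (x & 8) != 0 becomes (x >> 3) & 1, and (x & 8) == 0 becomes
     ((x >> 3) & 1) ^ 1, as described above.  */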
 
10677
  if ((code == NE || code == EQ)
10678
      && integer_zerop (arg1)
10679
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10680
    {
10681
      gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
10682
      if (srcstmt
10683
          && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
10684
        {
10685
          enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
10686
          tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10687
          tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
10688
                                       gimple_assign_rhs1 (srcstmt),
10689
                                       gimple_assign_rhs2 (srcstmt));
10690
          temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
10691
          if (temp)
10692
            return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
10693
        }
10694
    }
10695
 
10696
  if (! get_subtarget (target)
10697
      || GET_MODE (subtarget) != operand_mode)
10698
    subtarget = 0;
10699
 
10700
  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10701
 
10702
  if (target == 0)
10703
    target = gen_reg_rtx (mode);
10704
 
10705
  /* Try a cstore if possible.  */
10706
  return emit_store_flag_force (target, code, op0, op1,
10707
                                operand_mode, unsignedp,
10708
                                (TYPE_PRECISION (ops->type) == 1
10709
                                 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
10710
}
10711
 
10712
 
10713
/* Stubs in case we haven't got a casesi insn.  */
10714
#ifndef HAVE_casesi
10715
# define HAVE_casesi 0
10716
# define gen_casesi(a, b, c, d, e) (0)
10717
# define CODE_FOR_casesi CODE_FOR_nothing
10718
#endif
10719
 
10720
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
10721
   0 otherwise (i.e. if there is no casesi instruction).  */
10722
int
10723
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10724
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10725
            rtx fallback_label ATTRIBUTE_UNUSED)
10726
{
10727
  struct expand_operand ops[5];
10728
  enum machine_mode index_mode = SImode;
10729
  int index_bits = GET_MODE_BITSIZE (index_mode);
10730
  rtx op1, op2, index;
10731
 
10732
  if (! HAVE_casesi)
10733
    return 0;
10734
 
10735
  /* Convert the index to SImode.  */
10736
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10737
    {
10738
      enum machine_mode omode = TYPE_MODE (index_type);
10739
      rtx rangertx = expand_normal (range);
10740
 
10741
      /* We must handle the endpoints in the original mode.  */
10742
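      /* Subtract MINVAL and do the range check while still in the wider
         mode; only after INDEX is known to be in range is it safe to
         truncate it to SImode below.  */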
      index_expr = build2 (MINUS_EXPR, index_type,
10743
                           index_expr, minval);
10744
      minval = integer_zero_node;
10745
      index = expand_normal (index_expr);
10746
      if (default_label)
10747
        emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10748
                                 omode, 1, default_label);
10749
      /* Now we can safely truncate.  */
10750
      index = convert_to_mode (index_mode, index, 0);
10751
    }
10752
  else
10753
    {
10754
      if (TYPE_MODE (index_type) != index_mode)
10755
        {
10756
          index_type = lang_hooks.types.type_for_size (index_bits, 0);
10757
          index_expr = fold_convert (index_type, index_expr);
10758
        }
10759
 
10760
      index = expand_normal (index_expr);
10761
    }
10762
 
10763
  do_pending_stack_adjust ();
10764
 
10765
  op1 = expand_normal (minval);
10766
  op2 = expand_normal (range);
10767
 
10768
  create_input_operand (&ops[0], index, index_mode);
10769
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
10770
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
10771
  create_fixed_operand (&ops[3], table_label);
10772
  create_fixed_operand (&ops[4], (default_label
10773
                                  ? default_label
10774
                                  : fallback_label));
10775
  expand_jump_insn (CODE_FOR_casesi, 5, ops);
10776
  return 1;
10777
}
10778
 
10779
/* Attempt to generate a tablejump instruction; same concept.  */
10780
#ifndef HAVE_tablejump
10781
#define HAVE_tablejump 0
10782
#define gen_tablejump(x, y) (0)
10783
#endif
10784
 
10785
/* Subroutine of the next function.
10786
 
10787
   INDEX is the value being switched on, with the lowest value
10788
   in the table already subtracted.
10789
   MODE is its expected mode (needed if INDEX is constant).
10790
   RANGE is the length of the jump table.
10791
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10792
 
10793
   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10794
   index value is out of range.  */
10795
 
10796
static void
10797
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10798
              rtx default_label)
10799
{
10800
  rtx temp, vector;
10801
 
10802
  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10803
    cfun->cfg->max_jumptable_ents = INTVAL (range);
10804
 
10805
  /* Do an unsigned comparison (in the proper mode) between the index
10806
     expression and the value which represents the length of the range.
10807
     Since we just finished subtracting the lower bound of the range
10808
     from the index expression, this comparison allows us to simultaneously
10809
     check that the original index expression value is both greater than
10810
     or equal to the minimum value of the range and less than or equal to
10811
     the maximum value of the range.  */
10812
 
10813
  if (default_label)
10814
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10815
                             default_label);
10816
 
10817
  /* If index is in range, it must fit in Pmode.
10818
     Convert to Pmode so we can index with it.  */
10819
  if (mode != Pmode)
10820
    index = convert_to_mode (Pmode, index, 1);
10821
 
10822
  /* Don't let a MEM slip through, because then INDEX that comes
10823
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10824
     and break_out_memory_refs will go to work on it and mess it up.  */
10825
#ifdef PIC_CASE_VECTOR_ADDRESS
10826
  if (flag_pic && !REG_P (index))
10827
    index = copy_to_mode_reg (Pmode, index);
10828
#endif
10829
 
10830
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10831
     GET_MODE_SIZE, because this indicates how large insns are.  The other
10832
     uses should all be Pmode, because they are addresses.  This code
10833
     could fail if addresses and insns are not the same size.  */
10834
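  /* Compute the address of the table entry: TABLE_LABEL plus INDEX
     scaled by the size of one dispatch-table element.  */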
  index = gen_rtx_PLUS (Pmode,
10835
                        gen_rtx_MULT (Pmode, index,
10836
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10837
                        gen_rtx_LABEL_REF (Pmode, table_label));
10838
#ifdef PIC_CASE_VECTOR_ADDRESS
10839
  if (flag_pic)
10840
    index = PIC_CASE_VECTOR_ADDRESS (index);
10841
  else
10842
#endif
10843
    index = memory_address (CASE_VECTOR_MODE, index);
10844
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
10845
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
10846
  convert_move (temp, vector, 0);
10847
 
10848
  emit_jump_insn (gen_tablejump (temp, table_label));
10849
 
10850
  /* If we are generating PIC code or if the table is PC-relative, the
10851
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
10852
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10853
    emit_barrier ();
10854
}
10855
 
10856
int
10857
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10858
               rtx table_label, rtx default_label)
10859
{
10860
  rtx index;
10861
 
10862
  if (! HAVE_tablejump)
10863
    return 0;
10864
 
10865
  index_expr = fold_build2 (MINUS_EXPR, index_type,
10866
                            fold_convert (index_type, index_expr),
10867
                            fold_convert (index_type, minval));
10868
  index = expand_normal (index_expr);
10869
  do_pending_stack_adjust ();
10870
 
10871
  do_tablejump (index, TYPE_MODE (index_type),
10872
                convert_modes (TYPE_MODE (index_type),
10873
                               TYPE_MODE (TREE_TYPE (range)),
10874
                               expand_normal (range),
10875
                               TYPE_UNSIGNED (TREE_TYPE (range))),
10876
                table_label, default_label);
10877
  return 1;
10878
}
10879
 
10880
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
10881
static rtx
10882
const_vector_from_tree (tree exp)
10883
{
10884
  rtvec v;
10885
  int units, i;
10886
  tree link, elt;
10887
  enum machine_mode inner, mode;
10888
 
10889
  mode = TYPE_MODE (TREE_TYPE (exp));
10890
 
10891
  if (initializer_zerop (exp))
10892
    return CONST0_RTX (mode);
10893
 
10894
  units = GET_MODE_NUNITS (mode);
10895
  inner = GET_MODE_INNER (mode);
10896
 
10897
  v = rtvec_alloc (units);
10898
 
10899
  link = TREE_VECTOR_CST_ELTS (exp);
10900
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
10901
    {
10902
      elt = TREE_VALUE (link);
10903
 
10904
      if (TREE_CODE (elt) == REAL_CST)
10905
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10906
                                                         inner);
10907
      else if (TREE_CODE (elt) == FIXED_CST)
10908
        RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10909
                                                         inner);
10910
      else
10911
        RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
10912
                                                   inner);
10913
    }
10914
 
10915
  /* Initialize remaining elements to 0.  */
10916
  for (; i < units; ++i)
10917
    RTVEC_ELT (v, i) = CONST0_RTX (inner);
10918
 
10919
  return gen_rtx_CONST_VECTOR (mode, v);
10920
}
10921
 
10922
/* Build a decl for a personality function given a language prefix.  */
10923
 
10924
tree
10925
build_personality_function (const char *lang)
10926
{
10927
  const char *unwind_and_version;
10928
  tree decl, type;
10929
  char *name;
10930
 
10931
  switch (targetm_common.except_unwind_info (&global_options))
10932
    {
10933
    case UI_NONE:
10934
      return NULL;
10935
    case UI_SJLJ:
10936
      unwind_and_version = "_sj0";
10937
      break;
10938
    case UI_DWARF2:
10939
    case UI_TARGET:
10940
      unwind_and_version = "_v0";
10941
      break;
10942
    default:
10943
      gcc_unreachable ();
10944
    }
10945
 
10946
  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
10947
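  /* For example, LANG "gxx" with DWARF-2 unwind info yields the familiar
     "__gxx_personality_v0".  */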
 
10948
  type = build_function_type_list (integer_type_node, integer_type_node,
10949
                                   long_long_unsigned_type_node,
10950
                                   ptr_type_node, ptr_type_node, NULL_TREE);
10951
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10952
                     get_identifier (name), type);
10953
  DECL_ARTIFICIAL (decl) = 1;
10954
  DECL_EXTERNAL (decl) = 1;
10955
  TREE_PUBLIC (decl) = 1;
10956
 
10957
  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
10958
     are the flags assigned by targetm.encode_section_info.  */
10959
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
10960
 
10961
  return decl;
10962
}
10963
 
10964
/* Extracts the personality function of DECL and returns the corresponding
10965
   libfunc.  */
10966
 
10967
rtx
10968
get_personality_function (tree decl)
10969
{
10970
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
10971
  enum eh_personality_kind pk;
10972
 
10973
  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10974
  if (pk == eh_personality_none)
10975
    return NULL;
10976
 
10977
  if (!personality
10978
      && pk == eh_personality_any)
10979
    personality = lang_hooks.eh_personality ();
10980
 
10981
  if (pk == eh_personality_lang)
10982
    gcc_assert (personality != NULL_TREE);
10983
 
10984
  return XEXP (DECL_RTL (personality), 0);
10985
}
10986
 
10987
#include "gt-expr.h"
