OpenCores - Subversion repository openrisc_me
URL: https://opencores.org/ocsvn/openrisc_me/openrisc_me/trunk
File: openrisc/trunk/gnu-src/gcc-4.5.1/gcc/expr.c - blame information for rev 300

Line No. Rev Author Line
1 280 jeremybenn
/* Convert tree expression to rtl instructions, for GNU compiler.
2
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4
   Free Software Foundation, Inc.
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify it under
9
the terms of the GNU General Public License as published by the Free
10
Software Foundation; either version 3, or (at your option) any later
11
version.
12
 
13
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14
WARRANTY; without even the implied warranty of MERCHANTABILITY or
15
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16
for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
#include "config.h"
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
#include "machmode.h"
27
#include "real.h"
28
#include "rtl.h"
29
#include "tree.h"
30
#include "flags.h"
31
#include "regs.h"
32
#include "hard-reg-set.h"
33
#include "except.h"
34
#include "function.h"
35
#include "insn-config.h"
36
#include "insn-attr.h"
37
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
38
#include "expr.h"
39
#include "optabs.h"
40
#include "libfuncs.h"
41
#include "recog.h"
42
#include "reload.h"
43
#include "output.h"
44
#include "typeclass.h"
45
#include "toplev.h"
46
#include "ggc.h"
47
#include "langhooks.h"
48
#include "intl.h"
49
#include "tm_p.h"
50
#include "tree-iterator.h"
51
#include "tree-pass.h"
52
#include "tree-flow.h"
53
#include "target.h"
54
#include "timevar.h"
55
#include "df.h"
56
#include "diagnostic.h"
57
#include "ssaexpand.h"
58
 
59
/* Decide whether a function's arguments should be processed
60
   from first to last or from last to first.
61
 
62
   They should if the stack and args grow in opposite directions, but
63
   only if we have push insns.  */
64
 
65
#ifdef PUSH_ROUNDING
66
 
67
#ifndef PUSH_ARGS_REVERSED
68
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
69
#define PUSH_ARGS_REVERSED      /* If it's last to first.  */
70
#endif
71
#endif
72
 
73
#endif
74
 
75
#ifndef STACK_PUSH_CODE
76
#ifdef STACK_GROWS_DOWNWARD
77
#define STACK_PUSH_CODE PRE_DEC
78
#else
79
#define STACK_PUSH_CODE PRE_INC
80
#endif
81
#endif
82
 
83
 
84
/* If this is nonzero, we do not bother generating VOLATILE
85
   around volatile memory references, and we are willing to
86
   output indirect addresses.  If cse is to follow, we reject
87
   indirect addresses so a useful potential cse is generated;
88
   if it is used only once, instruction combination will produce
89
   the same indirect address eventually.  */
90
int cse_not_expected;
91
 
92
/* This structure is used by move_by_pieces to describe the move to
93
   be performed.  */
94
struct move_by_pieces_d
95
{
96
  rtx to;
97
  rtx to_addr;
98
  int autinc_to;
99
  int explicit_inc_to;
100
  rtx from;
101
  rtx from_addr;
102
  int autinc_from;
103
  int explicit_inc_from;
104
  unsigned HOST_WIDE_INT len;
105
  HOST_WIDE_INT offset;
106
  int reverse;
107
};
108
 
109
/* This structure is used by store_by_pieces to describe the clear to
110
   be performed.  */
111
 
112
struct store_by_pieces_d
113
{
114
  rtx to;
115
  rtx to_addr;
116
  int autinc_to;
117
  int explicit_inc_to;
118
  unsigned HOST_WIDE_INT len;
119
  HOST_WIDE_INT offset;
120
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
121
  void *constfundata;
122
  int reverse;
123
};
124
 
125
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
126
                                                     unsigned int,
127
                                                     unsigned int);
128
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
129
                              struct move_by_pieces_d *);
130
static bool block_move_libcall_safe_for_call_parm (void);
131
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
132
static tree emit_block_move_libcall_fn (int);
133
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
134
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
135
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
136
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
137
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
138
                               struct store_by_pieces_d *);
139
static tree clear_storage_libcall_fn (int);
140
static rtx compress_float_constant (rtx, rtx);
141
static rtx get_subtarget (rtx);
142
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143
                                     HOST_WIDE_INT, enum machine_mode,
144
                                     tree, tree, int, alias_set_type);
145
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147
                        tree, tree, alias_set_type, bool);
148
 
149
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
150
 
151
static int is_aligning_offset (const_tree, const_tree);
152
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153
                             enum expand_modifier);
154
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155
static rtx do_store_flag (sepops, rtx, enum machine_mode);
156
#ifdef PUSH_ROUNDING
157
static void emit_single_push_insn (enum machine_mode, rtx, tree);
158
#endif
159
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160
static rtx const_vector_from_tree (tree);
161
static void write_complex_part (rtx, rtx, bool);
162
 
163
/* Record for each mode whether we can move a register directly to or
164
   from an object of that mode in memory.  If we can't, we won't try
165
   to use that mode directly when accessing a field of that mode.  */
166
 
167
static char direct_load[NUM_MACHINE_MODES];
168
static char direct_store[NUM_MACHINE_MODES];
169
 
170
/* Record for each mode whether we can float-extend from memory.  */
171
 
172
static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173
 
174
/* This macro is used to determine whether move_by_pieces should be called
175
   to perform a structure copy.  */
176
#ifndef MOVE_BY_PIECES_P
177
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
180
#endif
181
 
182
/* This macro is used to determine whether clear_by_pieces should be
183
   called to clear storage.  */
184
#ifndef CLEAR_BY_PIECES_P
185
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
188
#endif
189
 
190
/* This macro is used to determine whether store_by_pieces should be
191
   called to "memset" storage with byte values other than zero.  */
192
#ifndef SET_BY_PIECES_P
193
#define SET_BY_PIECES_P(SIZE, ALIGN) \
194
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
195
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
196
#endif
197
 
198
/* This macro is used to determine whether store_by_pieces should be
199
   called to "memcpy" storage when the source is a constant string.  */
200
#ifndef STORE_BY_PIECES_P
201
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
202
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
203
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
204
#endif
205
 
206
/* This array records the insn_code of insns to perform block moves.  */
207
enum insn_code movmem_optab[NUM_MACHINE_MODES];
208
 
209
/* This array records the insn_code of insns to perform block sets.  */
210
enum insn_code setmem_optab[NUM_MACHINE_MODES];
211
 
212
/* These arrays record the insn_code of three different kinds of insns
213
   to perform block compares.  */
214
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
215
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
216
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
217
 
218
/* Synchronization primitives.  */
219
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
220
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
221
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
222
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
223
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
224
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
225
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
226
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
227
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
228
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
229
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
230
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
231
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
232
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
233
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
234
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
235
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
236
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
237
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
238
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
239
enum insn_code sync_lock_release[NUM_MACHINE_MODES];
240
 
241
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */
242
 
243
#ifndef SLOW_UNALIGNED_ACCESS
244
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
245
#endif
246
 
247
/* This is run to set up which modes can be used
248
   directly in memory and to initialize the block move optab.  It is run
249
   at the beginning of compilation and when the target is reinitialized.  */
250
 
251
void
252
init_expr_target (void)
253
{
254
  rtx insn, pat;
255
  enum machine_mode mode;
256
  int num_clobbers;
257
  rtx mem, mem1;
258
  rtx reg;
259
 
260
  /* Try indexing by frame ptr and try by stack ptr.
261
     It is known that on the Convex the stack ptr isn't a valid index.
262
     With luck, one or the other is valid on any machine.  */
263
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
264
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
265
 
266
  /* A scratch register we can modify in-place below to avoid
267
     useless RTL allocations.  */
268
  reg = gen_rtx_REG (VOIDmode, -1);
269
 
270
  insn = rtx_alloc (INSN);
271
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
272
  PATTERN (insn) = pat;
273
 
274
  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
275
       mode = (enum machine_mode) ((int) mode + 1))
276
    {
277
      int regno;
278
 
279
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
280
      PUT_MODE (mem, mode);
281
      PUT_MODE (mem1, mode);
282
      PUT_MODE (reg, mode);
283
 
284
      /* See if there is some register that can be used in this mode and
285
         directly loaded or stored from memory.  */
286
 
287
      if (mode != VOIDmode && mode != BLKmode)
288
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
289
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
290
             regno++)
291
          {
292
            if (! HARD_REGNO_MODE_OK (regno, mode))
293
              continue;
294
 
295
            SET_REGNO (reg, regno);
296
 
297
            SET_SRC (pat) = mem;
298
            SET_DEST (pat) = reg;
299
            if (recog (pat, insn, &num_clobbers) >= 0)
300
              direct_load[(int) mode] = 1;
301
 
302
            SET_SRC (pat) = mem1;
303
            SET_DEST (pat) = reg;
304
            if (recog (pat, insn, &num_clobbers) >= 0)
305
              direct_load[(int) mode] = 1;
306
 
307
            SET_SRC (pat) = reg;
308
            SET_DEST (pat) = mem;
309
            if (recog (pat, insn, &num_clobbers) >= 0)
310
              direct_store[(int) mode] = 1;
311
 
312
            SET_SRC (pat) = reg;
313
            SET_DEST (pat) = mem1;
314
            if (recog (pat, insn, &num_clobbers) >= 0)
315
              direct_store[(int) mode] = 1;
316
          }
317
    }
318
 
319
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
320
 
321
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
322
       mode = GET_MODE_WIDER_MODE (mode))
323
    {
324
      enum machine_mode srcmode;
325
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
326
           srcmode = GET_MODE_WIDER_MODE (srcmode))
327
        {
328
          enum insn_code ic;
329
 
330
          ic = can_extend_p (mode, srcmode, 0);
331
          if (ic == CODE_FOR_nothing)
332
            continue;
333
 
334
          PUT_MODE (mem, srcmode);
335
 
336
          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
337
            float_extend_from_mem[mode][srcmode] = true;
338
        }
339
    }
340
}
341
 
342
/* This is run at the start of compiling a function.  */
343
 
344
void
345
init_expr (void)
346
{
347
  memset (&crtl->expr, 0, sizeof (crtl->expr));
348
}
349
 
350
/* Copy data from FROM to TO, where the machine modes are not the same.
351
   Both modes may be integer, or both may be floating, or both may be
352
   fixed-point.
353
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
354
   This causes zero-extension instead of sign-extension.  */
355
 
356
void
357
convert_move (rtx to, rtx from, int unsignedp)
358
{
359
  enum machine_mode to_mode = GET_MODE (to);
360
  enum machine_mode from_mode = GET_MODE (from);
361
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
362
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
363
  enum insn_code code;
364
  rtx libcall;
365
 
366
  /* rtx code for making an equivalent value.  */
367
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
368
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
369
 
370
 
371
  gcc_assert (to_real == from_real);
372
  gcc_assert (to_mode != BLKmode);
373
  gcc_assert (from_mode != BLKmode);
374
 
375
  /* If the source and destination are already the same, then there's
376
     nothing to do.  */
377
  if (to == from)
378
    return;
379
 
380
  /* If FROM is a SUBREG that indicates that we have already done at least
381
     the required extension, strip it.  We don't handle such SUBREGs as
382
     TO here.  */
383
 
384
  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
385
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
386
          >= GET_MODE_SIZE (to_mode))
387
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
388
    from = gen_lowpart (to_mode, from), from_mode = to_mode;
389
 
390
  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
391
 
392
  if (to_mode == from_mode
393
      || (from_mode == VOIDmode && CONSTANT_P (from)))
394
    {
395
      emit_move_insn (to, from);
396
      return;
397
    }
398
 
399
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
400
    {
401
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
402
 
403
      if (VECTOR_MODE_P (to_mode))
404
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
405
      else
406
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
407
 
408
      emit_move_insn (to, from);
409
      return;
410
    }
411
 
412
  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
413
    {
414
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
415
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
416
      return;
417
    }
418
 
419
  if (to_real)
420
    {
421
      rtx value, insns;
422
      convert_optab tab;
423
 
424
      gcc_assert ((GET_MODE_PRECISION (from_mode)
425
                   != GET_MODE_PRECISION (to_mode))
426
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
427
                      != DECIMAL_FLOAT_MODE_P (to_mode)));
428
 
429
      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
430
        /* Conversion between decimal float and binary float, same size.  */
431
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
432
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
433
        tab = sext_optab;
434
      else
435
        tab = trunc_optab;
436
 
437
      /* Try converting directly if the insn is supported.  */
438
 
439
      code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
440
      if (code != CODE_FOR_nothing)
441
        {
442
          emit_unop_insn (code, to, from,
443
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
444
          return;
445
        }
446
 
447
      /* Otherwise use a libcall.  */
448
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);
449
 
450
      /* Is this conversion implemented yet?  */
451
      gcc_assert (libcall);
452
 
453
      start_sequence ();
454
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
455
                                       1, from, from_mode);
456
      insns = get_insns ();
457
      end_sequence ();
458
      emit_libcall_block (insns, to, value,
459
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
460
                                                                       from)
461
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
462
      return;
463
    }
464
 
465
  /* Handle pointer conversion.  */                     /* SPEE 900220.  */
466
  /* Targets are expected to provide conversion insns between PxImode and
467
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
468
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
469
    {
470
      enum machine_mode full_mode
471
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
472
 
473
      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
474
                  != CODE_FOR_nothing);
475
 
476
      if (full_mode != from_mode)
477
        from = convert_to_mode (full_mode, from, unsignedp);
478
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
479
                      to, from, UNKNOWN);
480
      return;
481
    }
482
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
483
    {
484
      rtx new_from;
485
      enum machine_mode full_mode
486
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
487
 
488
      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
489
                  != CODE_FOR_nothing);
490
 
491
      if (to_mode == full_mode)
492
        {
493
          emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
494
                          to, from, UNKNOWN);
495
          return;
496
        }
497
 
498
      new_from = gen_reg_rtx (full_mode);
499
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
500
                      new_from, from, UNKNOWN);
501
 
502
      /* else proceed to integer conversions below.  */
503
      from_mode = full_mode;
504
      from = new_from;
505
    }
506
 
507
   /* Make sure both are fixed-point modes or both are not.  */
508
   gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
509
               ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
510
   if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
511
    {
512
      /* If we widen from_mode to to_mode and they are in the same class,
513
         we won't saturate the result.
514
         Otherwise, always saturate the result to play safe.  */
515
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
516
          && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
517
        expand_fixed_convert (to, from, 0, 0);
518
      else
519
        expand_fixed_convert (to, from, 0, 1);
520
      return;
521
    }
522
 
523
  /* Now both modes are integers.  */
524
 
525
  /* Handle expanding beyond a word.  */
526
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
527
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
528
    {
529
      rtx insns;
530
      rtx lowpart;
531
      rtx fill_value;
532
      rtx lowfrom;
533
      int i;
534
      enum machine_mode lowpart_mode;
535
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
536
 
537
      /* Try converting directly if the insn is supported.  */
538
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
539
          != CODE_FOR_nothing)
540
        {
541
          /* If FROM is a SUBREG, put it into a register.  Do this
542
             so that we always generate the same set of insns for
543
             better cse'ing; if an intermediate assignment occurred,
544
             we won't be doing the operation directly on the SUBREG.  */
545
          if (optimize > 0 && GET_CODE (from) == SUBREG)
546
            from = force_reg (from_mode, from);
547
          emit_unop_insn (code, to, from, equiv_code);
548
          return;
549
        }
550
      /* Next, try converting via full word.  */
551
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
552
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
553
                   != CODE_FOR_nothing))
554
        {
555
          rtx word_to = gen_reg_rtx (word_mode);
556
          if (REG_P (to))
557
            {
558
              if (reg_overlap_mentioned_p (to, from))
559
                from = force_reg (from_mode, from);
560
              emit_clobber (to);
561
            }
562
          convert_move (word_to, from, unsignedp);
563
          emit_unop_insn (code, to, word_to, equiv_code);
564
          return;
565
        }
566
 
567
      /* No special multiword conversion insn; do it by hand.  */
568
      start_sequence ();
569
 
570
      /* Since we will turn this into a no conflict block, we must ensure
571
         that the source does not overlap the target.  */
572
 
573
      if (reg_overlap_mentioned_p (to, from))
574
        from = force_reg (from_mode, from);
575
 
576
      /* Get a copy of FROM widened to a word, if necessary.  */
577
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
578
        lowpart_mode = word_mode;
579
      else
580
        lowpart_mode = from_mode;
581
 
582
      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
583
 
584
      lowpart = gen_lowpart (lowpart_mode, to);
585
      emit_move_insn (lowpart, lowfrom);
586
 
587
      /* Compute the value to put in each remaining word.  */
588
      if (unsignedp)
589
        fill_value = const0_rtx;
590
      else
591
        fill_value = emit_store_flag (gen_reg_rtx (word_mode),
592
                                      LT, lowfrom, const0_rtx,
593
                                      VOIDmode, 0, -1);
594
 
595
      /* Fill the remaining words.  */
596
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
597
        {
598
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
599
          rtx subword = operand_subword (to, index, 1, to_mode);
600
 
601
          gcc_assert (subword);
602
 
603
          if (fill_value != subword)
604
            emit_move_insn (subword, fill_value);
605
        }
606
 
607
      insns = get_insns ();
608
      end_sequence ();
609
 
610
      emit_insn (insns);
611
      return;
612
    }
613
 
614
  /* Truncating multi-word to a word or less.  */
615
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
616
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
617
    {
618
      if (!((MEM_P (from)
619
             && ! MEM_VOLATILE_P (from)
620
             && direct_load[(int) to_mode]
621
             && ! mode_dependent_address_p (XEXP (from, 0)))
622
            || REG_P (from)
623
            || GET_CODE (from) == SUBREG))
624
        from = force_reg (from_mode, from);
625
      convert_move (to, gen_lowpart (word_mode, from), 0);
626
      return;
627
    }
628
 
629
  /* Now follow all the conversions between integers
630
     no more than a word long.  */
631
 
632
  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
633
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
634
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
635
                                GET_MODE_BITSIZE (from_mode)))
636
    {
637
      if (!((MEM_P (from)
638
             && ! MEM_VOLATILE_P (from)
639
             && direct_load[(int) to_mode]
640
             && ! mode_dependent_address_p (XEXP (from, 0)))
641
            || REG_P (from)
642
            || GET_CODE (from) == SUBREG))
643
        from = force_reg (from_mode, from);
644
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
645
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
646
        from = copy_to_reg (from);
647
      emit_move_insn (to, gen_lowpart (to_mode, from));
648
      return;
649
    }
650
 
651
  /* Handle extension.  */
652
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
653
    {
654
      /* Convert directly if that works.  */
655
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
656
          != CODE_FOR_nothing)
657
        {
658
          emit_unop_insn (code, to, from, equiv_code);
659
          return;
660
        }
661
      else
662
        {
663
          enum machine_mode intermediate;
664
          rtx tmp;
665
          tree shift_amount;
666
 
667
          /* Search for a mode to convert via.  */
668
          for (intermediate = from_mode; intermediate != VOIDmode;
669
               intermediate = GET_MODE_WIDER_MODE (intermediate))
670
            if (((can_extend_p (to_mode, intermediate, unsignedp)
671
                  != CODE_FOR_nothing)
672
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
673
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
674
                                               GET_MODE_BITSIZE (intermediate))))
675
                && (can_extend_p (intermediate, from_mode, unsignedp)
676
                    != CODE_FOR_nothing))
677
              {
678
                convert_move (to, convert_to_mode (intermediate, from,
679
                                                   unsignedp), unsignedp);
680
                return;
681
              }
682
 
683
          /* No suitable intermediate mode.
684
             Generate what we need with shifts.  */
685
          shift_amount = build_int_cst (NULL_TREE,
686
                                        GET_MODE_BITSIZE (to_mode)
687
                                        - GET_MODE_BITSIZE (from_mode));
688
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
689
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
690
                              to, unsignedp);
691
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
692
                              to, unsignedp);
693
          if (tmp != to)
694
            emit_move_insn (to, tmp);
695
          return;
696
        }
697
    }
698
 
699
  /* Support special truncate insns for certain modes.  */
700
  if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
701
    {
702
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
703
                      to, from, UNKNOWN);
704
      return;
705
    }
706
 
707
  /* Handle truncation of volatile memrefs, and so on;
708
     the things that couldn't be truncated directly,
709
     and for which there was no special instruction.
710
 
711
     ??? Code above formerly short-circuited this, for most integer
712
     mode pairs, with a force_reg in from_mode followed by a recursive
713
     call to this routine.  Appears always to have been wrong.  */
714
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
715
    {
716
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
717
      emit_move_insn (to, temp);
718
      return;
719
    }
720
 
721
  /* Mode combination is not recognized.  */
722
  gcc_unreachable ();
723
}
724
 
725
/* Return an rtx for a value that would result
726
   from converting X to mode MODE.
727
   Both X and MODE may be floating, or both integer.
728
   UNSIGNEDP is nonzero if X is an unsigned value.
729
   This can be done by referring to a part of X in place
730
   or by copying to a new temporary with conversion.  */
731
 
732
rtx
733
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
734
{
735
  return convert_modes (mode, VOIDmode, x, unsignedp);
736
}
737
 
738
/* Return an rtx for a value that would result
739
   from converting X from mode OLDMODE to mode MODE.
740
   Both modes may be floating, or both integer.
741
   UNSIGNEDP is nonzero if X is an unsigned value.
742
 
743
   This can be done by referring to a part of X in place
744
   or by copying to a new temporary with conversion.
745
 
746
   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
747
 
748
rtx
749
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
750
{
751
  rtx temp;
752
 
753
  /* If FROM is a SUBREG that indicates that we have already done at least
754
     the required extension, strip it.  */
755
 
756
  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
757
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
758
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
759
    x = gen_lowpart (mode, x);
760
 
761
  if (GET_MODE (x) != VOIDmode)
762
    oldmode = GET_MODE (x);
763
 
764
  if (mode == oldmode)
765
    return x;
766
 
767
  /* There is one case that we must handle specially: If we are converting
768
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
769
     we are to interpret the constant as unsigned, gen_lowpart will do
770
     the wrong thing if the constant appears negative.  What we want to do is
771
     make the high-order word of the constant zero, not all ones.  */
772
 
773
  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
774
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
775
      && CONST_INT_P (x) && INTVAL (x) < 0)
776
    {
777
      HOST_WIDE_INT val = INTVAL (x);
778
 
779
      if (oldmode != VOIDmode
780
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
781
        {
782
          int width = GET_MODE_BITSIZE (oldmode);
783
 
784
          /* We need to zero extend VAL.  */
785
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
786
        }
787
 
788
      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
789
    }
790
 
791
  /* We can do this with a gen_lowpart if both desired and current modes
792
     are integer, and this is either a constant integer, a register, or a
793
     non-volatile MEM.  Except for the constant case where MODE is no
794
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */
795
 
796
  if ((CONST_INT_P (x)
797
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
798
      || (GET_MODE_CLASS (mode) == MODE_INT
799
          && GET_MODE_CLASS (oldmode) == MODE_INT
800
          && (GET_CODE (x) == CONST_DOUBLE
801
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
802
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
803
                       && direct_load[(int) mode])
804
                      || (REG_P (x)
805
                          && (! HARD_REGISTER_P (x)
806
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
807
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
808
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
809
    {
810
      /* ?? If we don't know OLDMODE, we have to assume here that
811
         X does not need sign- or zero-extension.   This may not be
812
         the case, but it's the best we can do.  */
813
      if (CONST_INT_P (x) && oldmode != VOIDmode
814
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
815
        {
816
          HOST_WIDE_INT val = INTVAL (x);
817
          int width = GET_MODE_BITSIZE (oldmode);
818
 
819
          /* We must sign or zero-extend in this case.  Start by
820
             zero-extending, then sign extend if we need to.  */
821
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
822
          if (! unsignedp
823
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
824
            val |= (HOST_WIDE_INT) (-1) << width;
825
 
826
          return gen_int_mode (val, mode);
827
        }
828
 
829
      return gen_lowpart (mode, x);
830
    }
831
 
832
  /* Converting from integer constant into mode is always equivalent to a
833
     subreg operation.  */
834
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
835
    {
836
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
837
      return simplify_gen_subreg (mode, x, oldmode, 0);
838
    }
839
 
840
  temp = gen_reg_rtx (mode);
841
  convert_move (temp, x, unsignedp);
842
  return temp;
843
}
844
 
845
/* STORE_MAX_PIECES is the number of bytes at a time that we can
846
   store efficiently.  Due to internal GCC limitations, this is
847
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
848
   for an immediate constant.  */
849
 
850
#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
851
 
852
/* Determine whether the LEN bytes can be moved by using several move
853
   instructions.  Return nonzero if a call to move_by_pieces should
854
   succeed.  */
855
 
856
int
857
can_move_by_pieces (unsigned HOST_WIDE_INT len,
858
                    unsigned int align ATTRIBUTE_UNUSED)
859
{
860
  return MOVE_BY_PIECES_P (len, align);
861
}
862
 
863
/* Generate several move instructions to copy LEN bytes from block FROM to
864
   block TO.  (These are MEM rtx's with BLKmode).
865
 
866
   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
867
   used to push FROM to the stack.
868
 
869
   ALIGN is maximum stack alignment we can assume.
870
 
871
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
872
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
873
   stpcpy.  */
874
 
875
rtx
876
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
877
                unsigned int align, int endp)
878
{
879
  struct move_by_pieces_d data;
880
  enum machine_mode to_addr_mode, from_addr_mode
881
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
882
  rtx to_addr, from_addr = XEXP (from, 0);
883
  unsigned int max_size = MOVE_MAX_PIECES + 1;
884
  enum machine_mode mode = VOIDmode, tmode;
885
  enum insn_code icode;
886
 
887
  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
888
 
889
  data.offset = 0;
890
  data.from_addr = from_addr;
891
  if (to)
892
    {
893
      to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
894
      to_addr = XEXP (to, 0);
895
      data.to = to;
896
      data.autinc_to
897
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
898
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
899
      data.reverse
900
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
901
    }
902
  else
903
    {
904
      to_addr_mode = VOIDmode;
905
      to_addr = NULL_RTX;
906
      data.to = NULL_RTX;
907
      data.autinc_to = 1;
908
#ifdef STACK_GROWS_DOWNWARD
909
      data.reverse = 1;
910
#else
911
      data.reverse = 0;
912
#endif
913
    }
914
  data.to_addr = to_addr;
915
  data.from = from;
916
  data.autinc_from
917
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
918
       || GET_CODE (from_addr) == POST_INC
919
       || GET_CODE (from_addr) == POST_DEC);
920
 
921
  data.explicit_inc_from = 0;
922
  data.explicit_inc_to = 0;
923
  if (data.reverse) data.offset = len;
924
  data.len = len;
925
 
926
  /* If copying requires more than two move insns,
927
     copy addresses to registers (to make displacements shorter)
928
     and use post-increment if available.  */
929
  if (!(data.autinc_from && data.autinc_to)
930
      && move_by_pieces_ninsns (len, align, max_size) > 2)
931
    {
932
      /* Find the mode of the largest move...  */
933
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
934
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
935
        if (GET_MODE_SIZE (tmode) < max_size)
936
          mode = tmode;
937
 
938
      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
939
        {
940
          data.from_addr = copy_to_mode_reg (from_addr_mode,
941
                                             plus_constant (from_addr, len));
942
          data.autinc_from = 1;
943
          data.explicit_inc_from = -1;
944
        }
945
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
946
        {
947
          data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
948
          data.autinc_from = 1;
949
          data.explicit_inc_from = 1;
950
        }
951
      if (!data.autinc_from && CONSTANT_P (from_addr))
952
        data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
953
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
954
        {
955
          data.to_addr = copy_to_mode_reg (to_addr_mode,
956
                                           plus_constant (to_addr, len));
957
          data.autinc_to = 1;
958
          data.explicit_inc_to = -1;
959
        }
960
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
961
        {
962
          data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
963
          data.autinc_to = 1;
964
          data.explicit_inc_to = 1;
965
        }
966
      if (!data.autinc_to && CONSTANT_P (to_addr))
967
        data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
968
    }
969
 
970
  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
971
  if (align >= GET_MODE_ALIGNMENT (tmode))
972
    align = GET_MODE_ALIGNMENT (tmode);
973
  else
974
    {
975
      enum machine_mode xmode;
976
 
977
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
978
           tmode != VOIDmode;
979
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
980
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
981
            || SLOW_UNALIGNED_ACCESS (tmode, align))
982
          break;
983
 
984
      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
985
    }
986
 
987
  /* First move what we can in the largest integer mode, then go to
988
     successively smaller modes.  */
989
 
990
  while (max_size > 1)
991
    {
992
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
993
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
994
        if (GET_MODE_SIZE (tmode) < max_size)
995
          mode = tmode;
996
 
997
      if (mode == VOIDmode)
998
        break;
999
 
1000
      icode = optab_handler (mov_optab, mode)->insn_code;
1001
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1002
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1003
 
1004
      max_size = GET_MODE_SIZE (mode);
1005
    }
1006
 
1007
  /* The code above should have handled everything.  */
1008
  gcc_assert (!data.len);
1009
 
1010
  if (endp)
1011
    {
1012
      rtx to1;
1013
 
1014
      gcc_assert (!data.reverse);
1015
      if (data.autinc_to)
1016
        {
1017
          if (endp == 2)
1018
            {
1019
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1020
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1021
              else
1022
                data.to_addr = copy_to_mode_reg (to_addr_mode,
1023
                                                 plus_constant (data.to_addr,
1024
                                                                -1));
1025
            }
1026
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1027
                                           data.offset);
1028
        }
1029
      else
1030
        {
1031
          if (endp == 2)
1032
            --data.offset;
1033
          to1 = adjust_address (data.to, QImode, data.offset);
1034
        }
1035
      return to1;
1036
    }
1037
  else
1038
    return data.to;
1039
}
1040
 
1041
/* Return number of insns required to move L bytes by pieces.
1042
   ALIGN (in bits) is maximum alignment we can assume.  */
1043
 
1044
static unsigned HOST_WIDE_INT
1045
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1046
                       unsigned int max_size)
1047
{
1048
  unsigned HOST_WIDE_INT n_insns = 0;
1049
  enum machine_mode tmode;
1050
 
1051
  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1052
  if (align >= GET_MODE_ALIGNMENT (tmode))
1053
    align = GET_MODE_ALIGNMENT (tmode);
1054
  else
1055
    {
1056
      enum machine_mode tmode, xmode;
1057
 
1058
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1059
           tmode != VOIDmode;
1060
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1061
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1062
            || SLOW_UNALIGNED_ACCESS (tmode, align))
1063
          break;
1064
 
1065
      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1066
    }
1067
 
1068
  while (max_size > 1)
1069
    {
1070
      enum machine_mode mode = VOIDmode;
1071
      enum insn_code icode;
1072
 
1073
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1074
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1075
        if (GET_MODE_SIZE (tmode) < max_size)
1076
          mode = tmode;
1077
 
1078
      if (mode == VOIDmode)
1079
        break;
1080
 
1081
      icode = optab_handler (mov_optab, mode)->insn_code;
1082
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1083
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1084
 
1085
      max_size = GET_MODE_SIZE (mode);
1086
    }
1087
 
1088
  gcc_assert (!l);
1089
  return n_insns;
1090
}
1091
 
1092
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1093
   with move instructions for mode MODE.  GENFUN is the gen_... function
1094
   to make a move insn for that mode.  DATA has all the other info.  */
1095
 
1096
static void
1097
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1098
                  struct move_by_pieces_d *data)
1099
{
1100
  unsigned int size = GET_MODE_SIZE (mode);
1101
  rtx to1 = NULL_RTX, from1;
1102
 
1103
  while (data->len >= size)
1104
    {
1105
      if (data->reverse)
1106
        data->offset -= size;
1107
 
1108
      if (data->to)
1109
        {
1110
          if (data->autinc_to)
1111
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1112
                                             data->offset);
1113
          else
1114
            to1 = adjust_address (data->to, mode, data->offset);
1115
        }
1116
 
1117
      if (data->autinc_from)
1118
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1119
                                           data->offset);
1120
      else
1121
        from1 = adjust_address (data->from, mode, data->offset);
1122
 
1123
      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1124
        emit_insn (gen_add2_insn (data->to_addr,
1125
                                  GEN_INT (-(HOST_WIDE_INT)size)));
1126
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1127
        emit_insn (gen_add2_insn (data->from_addr,
1128
                                  GEN_INT (-(HOST_WIDE_INT)size)));
1129
 
1130
      if (data->to)
1131
        emit_insn ((*genfun) (to1, from1));
1132
      else
1133
        {
1134
#ifdef PUSH_ROUNDING
1135
          emit_single_push_insn (mode, from1, NULL);
1136
#else
1137
          gcc_unreachable ();
1138
#endif
1139
        }
1140
 
1141
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1142
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1143
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1144
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1145
 
1146
      if (! data->reverse)
1147
        data->offset += size;
1148
 
1149
      data->len -= size;
1150
    }
1151
}
1152
 
1153
/* Emit code to move a block Y to a block X.  This may be done with
1154
   string-move instructions, with multiple scalar move instructions,
1155
   or with a library call.
1156
 
1157
   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1158
   SIZE is an rtx that says how long they are.
1159
   ALIGN is the maximum alignment we can assume they have.
1160
   METHOD describes what kind of copy this is, and what mechanisms may be used.
1161
 
1162
   Return the address of the new block, if memcpy is called and returns it,
1163
   0 otherwise.  */
1164
 
1165
rtx
1166
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1167
                       unsigned int expected_align, HOST_WIDE_INT expected_size)
1168
{
1169
  bool may_use_call;
1170
  rtx retval = 0;
1171
  unsigned int align;
1172
 
1173
  switch (method)
1174
    {
1175
    case BLOCK_OP_NORMAL:
1176
    case BLOCK_OP_TAILCALL:
1177
      may_use_call = true;
1178
      break;
1179
 
1180
    case BLOCK_OP_CALL_PARM:
1181
      may_use_call = block_move_libcall_safe_for_call_parm ();
1182
 
1183
      /* Make inhibit_defer_pop nonzero around the library call
1184
         to force it to pop the arguments right away.  */
1185
      NO_DEFER_POP;
1186
      break;
1187
 
1188
    case BLOCK_OP_NO_LIBCALL:
1189
      may_use_call = false;
1190
      break;
1191
 
1192
    default:
1193
      gcc_unreachable ();
1194
    }
1195
 
1196
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1197
  gcc_assert (align >= BITS_PER_UNIT);
1198
 
1199
  gcc_assert (MEM_P (x));
1200
  gcc_assert (MEM_P (y));
1201
  gcc_assert (size);
1202
 
1203
  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1204
     block copy is more efficient for other large modes, e.g. DCmode.  */
1205
  x = adjust_address (x, BLKmode, 0);
1206
  y = adjust_address (y, BLKmode, 0);
1207
 
1208
  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
1209
     can be incorrect is coming from __builtin_memcpy.  */
1210
  if (CONST_INT_P (size))
1211
    {
1212
      if (INTVAL (size) == 0)
1213
        return 0;
1214
 
1215
      x = shallow_copy_rtx (x);
1216
      y = shallow_copy_rtx (y);
1217
      set_mem_size (x, size);
1218
      set_mem_size (y, size);
1219
    }
1220
 
1221
  if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1222
    move_by_pieces (x, y, INTVAL (size), align, 0);
1223
  else if (emit_block_move_via_movmem (x, y, size, align,
1224
                                       expected_align, expected_size))
1225
    ;
1226
  else if (may_use_call
1227
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1228
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1229
    retval = emit_block_move_via_libcall (x, y, size,
1230
                                          method == BLOCK_OP_TAILCALL);
1231
  else
1232
    emit_block_move_via_loop (x, y, size, align);
1233
 
1234
  if (method == BLOCK_OP_CALL_PARM)
1235
    OK_DEFER_POP;
1236
 
1237
  return retval;
1238
}
1239
 
1240
rtx
1241
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1242
{
1243
  return emit_block_move_hints (x, y, size, method, 0, -1);
1244
}
1245
 
1246
/* A subroutine of emit_block_move.  Returns true if calling the
1247
   block move libcall will not clobber any parameters which may have
1248
   already been placed on the stack.  */
1249
 
1250
static bool
1251
block_move_libcall_safe_for_call_parm (void)
1252
{
1253
#if defined (REG_PARM_STACK_SPACE)
1254
  tree fn;
1255
#endif
1256
 
1257
  /* If arguments are pushed on the stack, then they're safe.  */
1258
  if (PUSH_ARGS)
1259
    return true;
1260
 
1261
  /* If registers go on the stack anyway, any argument is sure to clobber
1262
     an outgoing argument.  */
1263
#if defined (REG_PARM_STACK_SPACE)
1264
  fn = emit_block_move_libcall_fn (false);
1265
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1266
      && REG_PARM_STACK_SPACE (fn) != 0)
1267
    return false;
1268
#endif
1269
 
1270
  /* If any argument goes in memory, then it might clobber an outgoing
1271
     argument.  */
1272
  {
1273
    CUMULATIVE_ARGS args_so_far;
1274
    tree fn, arg;
1275
 
1276
    fn = emit_block_move_libcall_fn (false);
1277
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1278
 
1279
    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1280
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1281
      {
1282
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1283
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1284
        if (!tmp || !REG_P (tmp))
1285
          return false;
1286
        if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1287
          return false;
1288
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1289
      }
1290
  }
1291
  return true;
1292
}
1293
 
1294
/* A subroutine of emit_block_move.  Expand a movmem pattern;
1295
   return true if successful.  */
1296
 
1297
static bool
1298
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1299
                            unsigned int expected_align, HOST_WIDE_INT expected_size)
1300
{
1301
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1302
  int save_volatile_ok = volatile_ok;
1303
  enum machine_mode mode;
1304
 
1305
  if (expected_align < align)
1306
    expected_align = align;
1307
 
1308
  /* Since this is a move insn, we don't care about volatility.  */
1309
  volatile_ok = 1;
1310
 
1311
  /* Try the most limited insn first, because there's no point
1312
     including more than one in the machine description unless
1313
     the more limited one has some advantage.  */
1314
 
1315
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1316
       mode = GET_MODE_WIDER_MODE (mode))
1317
    {
1318
      enum insn_code code = movmem_optab[(int) mode];
1319
      insn_operand_predicate_fn pred;
1320
 
1321
      if (code != CODE_FOR_nothing
1322
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1323
             here because if SIZE is less than the mode mask, as it is
1324
             returned by the macro, it will definitely be less than the
1325
             actual mode mask.  */
1326
          && ((CONST_INT_P (size)
1327
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
1328
                   <= (GET_MODE_MASK (mode) >> 1)))
1329
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1330
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1331
              || (*pred) (x, BLKmode))
1332
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1333
              || (*pred) (y, BLKmode))
1334
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1335
              || (*pred) (opalign, VOIDmode)))
1336
        {
1337
          rtx op2;
1338
          rtx last = get_last_insn ();
1339
          rtx pat;
1340
 
1341
          op2 = convert_to_mode (mode, size, 1);
1342
          pred = insn_data[(int) code].operand[2].predicate;
1343
          if (pred != 0 && ! (*pred) (op2, mode))
1344
            op2 = copy_to_mode_reg (mode, op2);
1345
 
1346
          /* ??? When called via emit_block_move_for_call, it'd be
1347
             nice if there were some way to inform the backend, so
1348
             that it doesn't fail the expansion because it thinks
1349
             emitting the libcall would be more efficient.  */
1350
 
1351
          if (insn_data[(int) code].n_operands == 4)
1352
            pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1353
          else
1354
            pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1355
                                        GEN_INT (expected_align
1356
                                                 / BITS_PER_UNIT),
1357
                                        GEN_INT (expected_size));
1358
          if (pat)
1359
            {
1360
              emit_insn (pat);
1361
              volatile_ok = save_volatile_ok;
1362
              return true;
1363
            }
1364
          else
1365
            delete_insns_since (last);
1366
        }
1367
    }
1368
 
1369
  volatile_ok = save_volatile_ok;
1370
  return false;
1371
}
1372
 
1373
/* A subroutine of emit_block_move.  Expand a call to memcpy.
1374
   Return the return value from memcpy, 0 otherwise.  */
1375
 
1376
rtx
1377
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1378
{
1379
  rtx dst_addr, src_addr;
1380
  tree call_expr, fn, src_tree, dst_tree, size_tree;
1381
  enum machine_mode size_mode;
1382
  rtx retval;
1383
 
1384
  /* Emit code to copy the addresses of DST and SRC and SIZE into new
1385
     pseudos.  We can then place those new pseudos into a VAR_DECL and
1386
     use them later.  */
1387
 
1388
  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1389
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1390
 
1391
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
1392
  src_addr = convert_memory_address (ptr_mode, src_addr);
1393
 
1394
  dst_tree = make_tree (ptr_type_node, dst_addr);
1395
  src_tree = make_tree (ptr_type_node, src_addr);
1396
 
1397
  size_mode = TYPE_MODE (sizetype);
1398
 
1399
  size = convert_to_mode (size_mode, size, 1);
1400
  size = copy_to_mode_reg (size_mode, size);
1401
 
1402
  /* It is incorrect to use the libcall calling conventions to call
1403
     memcpy in this context.  This could be a user call to memcpy and
1404
     the user may wish to examine the return value from memcpy.  For
1405
     targets where libcalls and normal calls have different conventions
1406
     for returning pointers, we could end up generating incorrect code.  */
1407
 
1408
  size_tree = make_tree (sizetype, size);
1409
 
1410
  fn = emit_block_move_libcall_fn (true);
1411
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1412
  CALL_EXPR_TAILCALL (call_expr) = tailcall;
1413
 
1414
  retval = expand_normal (call_expr);
1415
 
1416
  return retval;
1417
}
1418
 
1419
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
1420
   for the function we use for block copies.  The first time FOR_CALL
1421
   is true, we call assemble_external.  */
1422
 
1423
static GTY(()) tree block_move_fn;
1424
 
1425
void
1426
init_block_move_fn (const char *asmspec)
1427
{
1428
  if (!block_move_fn)
1429
    {
1430
      tree args, fn;
1431
 
1432
      fn = get_identifier ("memcpy");
1433
      args = build_function_type_list (ptr_type_node, ptr_type_node,
1434
                                       const_ptr_type_node, sizetype,
1435
                                       NULL_TREE);
1436
 
1437
      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1438
      DECL_EXTERNAL (fn) = 1;
1439
      TREE_PUBLIC (fn) = 1;
1440
      DECL_ARTIFICIAL (fn) = 1;
1441
      TREE_NOTHROW (fn) = 1;
1442
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1443
      DECL_VISIBILITY_SPECIFIED (fn) = 1;
1444
 
1445
      block_move_fn = fn;
1446
    }
1447
 
1448
  if (asmspec)
1449
    set_user_assembler_name (block_move_fn, asmspec);
1450
}
1451
 
1452
static tree
1453
emit_block_move_libcall_fn (int for_call)
1454
{
1455
  static bool emitted_extern;
1456
 
1457
  if (!block_move_fn)
1458
    init_block_move_fn (NULL);
1459
 
1460
  if (for_call && !emitted_extern)
1461
    {
1462
      emitted_extern = true;
1463
      make_decl_rtl (block_move_fn);
1464
      assemble_external (block_move_fn);
1465
    }
1466
 
1467
  return block_move_fn;
1468
}
1469
 
1470
/* A subroutine of emit_block_move.  Copy the data via an explicit
1471
   loop.  This is used only when libcalls are forbidden.  */
1472
/* ??? It'd be nice to copy in hunks larger than QImode.  */
1473
 
1474
static void
1475
emit_block_move_via_loop (rtx x, rtx y, rtx size,
1476
                          unsigned int align ATTRIBUTE_UNUSED)
1477
{
1478
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1479
  enum machine_mode x_addr_mode
1480
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
1481
  enum machine_mode y_addr_mode
1482
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
1483
  enum machine_mode iter_mode;
1484
 
1485
  iter_mode = GET_MODE (size);
1486
  if (iter_mode == VOIDmode)
1487
    iter_mode = word_mode;
1488
 
1489
  top_label = gen_label_rtx ();
1490
  cmp_label = gen_label_rtx ();
1491
  iter = gen_reg_rtx (iter_mode);
1492
 
1493
  emit_move_insn (iter, const0_rtx);
1494
 
1495
  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1496
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1497
  do_pending_stack_adjust ();
1498
 
1499
  emit_jump (cmp_label);
1500
  emit_label (top_label);
1501
 
1502
  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1503
  x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);
1504
 
1505
  if (x_addr_mode != y_addr_mode)
1506
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1507
  y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);
1508
 
1509
  x = change_address (x, QImode, x_addr);
1510
  y = change_address (y, QImode, y_addr);
1511
 
1512
  emit_move_insn (x, y);
1513
 
1514
  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1515
                             true, OPTAB_LIB_WIDEN);
1516
  if (tmp != iter)
1517
    emit_move_insn (iter, tmp);
1518
 
1519
  emit_label (cmp_label);
1520
 
1521
  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1522
                           true, top_label);
1523
}
1524
 
1525
/* Copy all or part of a value X into registers starting at REGNO.
1526
   The number of registers to be filled is NREGS.  */
1527
 
1528
void
1529
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1530
{
1531
  int i;
1532
#ifdef HAVE_load_multiple
1533
  rtx pat;
1534
  rtx last;
1535
#endif
1536
 
1537
  if (nregs == 0)
1538
    return;
1539
 
1540
  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1541
    x = validize_mem (force_const_mem (mode, x));
1542
 
1543
  /* See if the machine can do this with a load multiple insn.  */
1544
#ifdef HAVE_load_multiple
1545
  if (HAVE_load_multiple)
1546
    {
1547
      last = get_last_insn ();
1548
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1549
                               GEN_INT (nregs));
1550
      if (pat)
1551
        {
1552
          emit_insn (pat);
1553
          return;
1554
        }
1555
      else
1556
        delete_insns_since (last);
1557
    }
1558
#endif
1559
 
1560
  for (i = 0; i < nregs; i++)
1561
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1562
                    operand_subword_force (x, i, mode));
1563
}
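/* Illustrative sketch (editorial addition, not part of the original source):
   a call expander loading a two-word DImode argument X from memory into
   consecutive hard registers.  The starting register number 3 is a
   placeholder, and the example assumes a target whose word is half the
   size of DImode.  */
#if 0
  rtx x = gen_rtx_MEM (DImode, gen_reg_rtx (Pmode));
  move_block_to_reg (3, x, 2, DImode);
#endif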
1564
 
1565
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1566
   The number of registers to be filled is NREGS.  */
1567
 
1568
void
1569
move_block_from_reg (int regno, rtx x, int nregs)
1570
{
1571
  int i;
1572
 
1573
  if (nregs == 0)
1574
    return;
1575
 
1576
  /* See if the machine can do this with a store multiple insn.  */
1577
#ifdef HAVE_store_multiple
1578
  if (HAVE_store_multiple)
1579
    {
1580
      rtx last = get_last_insn ();
1581
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1582
                                    GEN_INT (nregs));
1583
      if (pat)
1584
        {
1585
          emit_insn (pat);
1586
          return;
1587
        }
1588
      else
1589
        delete_insns_since (last);
1590
    }
1591
#endif
1592
 
1593
  for (i = 0; i < nregs; i++)
1594
    {
1595
      rtx tem = operand_subword (x, i, 1, BLKmode);
1596
 
1597
      gcc_assert (tem);
1598
 
1599
      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1600
    }
1601
}
1602
 
1603
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1604
   ORIG, where ORIG is a non-consecutive group of registers represented by
1605
   a PARALLEL.  The clone is identical to the original except in that the
1606
   original set of registers is replaced by a new set of pseudo registers.
1607
   The new set has the same modes as the original set.  */
1608
 
1609
rtx
1610
gen_group_rtx (rtx orig)
1611
{
1612
  int i, length;
1613
  rtx *tmps;
1614
 
1615
  gcc_assert (GET_CODE (orig) == PARALLEL);
1616
 
1617
  length = XVECLEN (orig, 0);
1618
  tmps = XALLOCAVEC (rtx, length);
1619
 
1620
  /* Skip a NULL entry in the first slot.  */
1621
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1622
 
1623
  if (i)
1624
    tmps[0] = 0;
1625
 
1626
  for (; i < length; i++)
1627
    {
1628
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1629
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1630
 
1631
      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1632
    }
1633
 
1634
  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1635
}
1636
 
1637
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
1638
   except that values are placed in TMPS[i], and must later be moved
1639
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
1640
 
1641
static void
1642
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1643
{
1644
  rtx src;
1645
  int start, i;
1646
  enum machine_mode m = GET_MODE (orig_src);
1647
 
1648
  gcc_assert (GET_CODE (dst) == PARALLEL);
1649
 
1650
  if (m != VOIDmode
1651
      && !SCALAR_INT_MODE_P (m)
1652
      && !MEM_P (orig_src)
1653
      && GET_CODE (orig_src) != CONCAT)
1654
    {
1655
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1656
      if (imode == BLKmode)
1657
        src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1658
      else
1659
        src = gen_reg_rtx (imode);
1660
      if (imode != BLKmode)
1661
        src = gen_lowpart (GET_MODE (orig_src), src);
1662
      emit_move_insn (src, orig_src);
1663
      /* ...and back again.  */
1664
      if (imode != BLKmode)
1665
        src = gen_lowpart (imode, src);
1666
      emit_group_load_1 (tmps, dst, src, type, ssize);
1667
      return;
1668
    }
1669
 
1670
  /* Check for a NULL entry, used to indicate that the parameter goes
1671
     both on the stack and in registers.  */
1672
  if (XEXP (XVECEXP (dst, 0, 0), 0))
1673
    start = 0;
1674
  else
1675
    start = 1;
1676
 
1677
  /* Process the pieces.  */
1678
  for (i = start; i < XVECLEN (dst, 0); i++)
1679
    {
1680
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1681
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1682
      unsigned int bytelen = GET_MODE_SIZE (mode);
1683
      int shift = 0;
1684
 
1685
      /* Handle trailing fragments that run over the size of the struct.  */
1686
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1687
        {
1688
          /* Arrange to shift the fragment to where it belongs.
1689
             extract_bit_field loads to the lsb of the reg.  */
1690
          if (
1691
#ifdef BLOCK_REG_PADDING
1692
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1693
              == (BYTES_BIG_ENDIAN ? upward : downward)
1694
#else
1695
              BYTES_BIG_ENDIAN
1696
#endif
1697
              )
1698
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1699
          bytelen = ssize - bytepos;
1700
          gcc_assert (bytelen > 0);
1701
        }
1702
 
1703
      /* If we won't be loading directly from memory, protect the real source
1704
         from strange tricks we might play; but make sure that the source can
1705
         be loaded directly into the destination.  */
1706
      src = orig_src;
1707
      if (!MEM_P (orig_src)
1708
          && (!CONSTANT_P (orig_src)
1709
              || (GET_MODE (orig_src) != mode
1710
                  && GET_MODE (orig_src) != VOIDmode)))
1711
        {
1712
          if (GET_MODE (orig_src) == VOIDmode)
1713
            src = gen_reg_rtx (mode);
1714
          else
1715
            src = gen_reg_rtx (GET_MODE (orig_src));
1716
 
1717
          emit_move_insn (src, orig_src);
1718
        }
1719
 
1720
      /* Optimize the access just a bit.  */
1721
      if (MEM_P (src)
1722
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1723
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1724
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1725
          && bytelen == GET_MODE_SIZE (mode))
1726
        {
1727
          tmps[i] = gen_reg_rtx (mode);
1728
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1729
        }
1730
      else if (COMPLEX_MODE_P (mode)
1731
               && GET_MODE (src) == mode
1732
               && bytelen == GET_MODE_SIZE (mode))
1733
        /* Let emit_move_complex do the bulk of the work.  */
1734
        tmps[i] = src;
1735
      else if (GET_CODE (src) == CONCAT)
1736
        {
1737
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1738
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1739
 
1740
          if ((bytepos == 0 && bytelen == slen0)
1741
              || (bytepos != 0 && bytepos + bytelen <= slen))
1742
            {
1743
              /* The following assumes that the concatenated objects all
1744
                 have the same size.  In this case, a simple calculation
1745
                 can be used to determine the object and the bit field
1746
                 to be extracted.  */
1747
              tmps[i] = XEXP (src, bytepos / slen0);
1748
              if (! CONSTANT_P (tmps[i])
1749
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1750
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1751
                                             (bytepos % slen0) * BITS_PER_UNIT,
1752
                                             1, NULL_RTX, mode, mode);
1753
            }
1754
          else
1755
            {
1756
              rtx mem;
1757
 
1758
              gcc_assert (!bytepos);
1759
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
1760
              emit_move_insn (mem, src);
1761
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1762
                                           0, 1, NULL_RTX, mode, mode);
1763
            }
1764
        }
1765
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1766
         SIMD register, which is currently broken.  Until we get GCC
1767
         to emit proper RTL for these cases, let's dump to memory.  */
1768
      else if (VECTOR_MODE_P (GET_MODE (dst))
1769
               && REG_P (src))
1770
        {
1771
          int slen = GET_MODE_SIZE (GET_MODE (src));
1772
          rtx mem;
1773
 
1774
          mem = assign_stack_temp (GET_MODE (src), slen, 0);
1775
          emit_move_insn (mem, src);
1776
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
1777
        }
1778
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1779
               && XVECLEN (dst, 0) > 1)
1780
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1781
      else if (CONSTANT_P (src))
1782
        {
1783
          HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1784
 
1785
          if (len == ssize)
1786
            tmps[i] = src;
1787
          else
1788
            {
1789
              rtx first, second;
1790
 
1791
              gcc_assert (2 * len == ssize);
1792
              split_double (src, &first, &second);
1793
              if (i)
1794
                tmps[i] = second;
1795
              else
1796
                tmps[i] = first;
1797
            }
1798
        }
1799
      else if (REG_P (src) && GET_MODE (src) == mode)
1800
        tmps[i] = src;
1801
      else
1802
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1803
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1804
                                     mode, mode);
1805
 
1806
      if (shift)
1807
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1808
                                build_int_cst (NULL_TREE, shift), tmps[i], 0);
1809
    }
1810
}
1811
 
1812
/* Emit code to move a block SRC of type TYPE to a block DST,
1813
   where DST is non-consecutive registers represented by a PARALLEL.
1814
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1815
   if not known.  */
1816
 
1817
void
1818
emit_group_load (rtx dst, rtx src, tree type, int ssize)
1819
{
1820
  rtx *tmps;
1821
  int i;
1822
 
1823
  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1824
  emit_group_load_1 (tmps, dst, src, type, ssize);
1825
 
1826
  /* Copy the extracted pieces into the proper (probable) hard regs.  */
1827
  for (i = 0; i < XVECLEN (dst, 0); i++)
1828
    {
1829
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1830
      if (d == NULL)
1831
        continue;
1832
      emit_move_insn (d, tmps[i]);
1833
    }
1834
}
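/* Illustrative sketch (editorial addition, not part of the original source):
   a target hook describing a DImode value split across two SImode hard
   registers would hand back a PARALLEL like PAR below; emit_group_load then
   fills such a group from SRC.  The hard register numbers 3 and 4 are
   placeholders.  */
#if 0
  rtx src = gen_reg_rtx (DImode);
  rtx elt0 = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (SImode, 3),
                                GEN_INT (0));
  rtx elt1 = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (SImode, 4),
                                GEN_INT (4));
  rtx par = gen_rtx_PARALLEL (DImode, gen_rtvec (2, elt0, elt1));
  emit_group_load (par, src, long_long_integer_type_node, 8);
#endif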
1835
 
1836
/* Similar, but load SRC into new pseudos in a format that looks like
1837
   PARALLEL.  This can later be fed to emit_group_move to get things
1838
   in the right place.  */
1839
 
1840
rtx
1841
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1842
{
1843
  rtvec vec;
1844
  int i;
1845
 
1846
  vec = rtvec_alloc (XVECLEN (parallel, 0));
1847
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1848
 
1849
  /* Convert the vector to look just like the original PARALLEL, except
1850
     with the computed values.  */
1851
  for (i = 0; i < XVECLEN (parallel, 0); i++)
1852
    {
1853
      rtx e = XVECEXP (parallel, 0, i);
1854
      rtx d = XEXP (e, 0);
1855
 
1856
      if (d)
1857
        {
1858
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1859
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1860
        }
1861
      RTVEC_ELT (vec, i) = e;
1862
    }
1863
 
1864
  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1865
}
1866
 
1867
/* Emit code to move a block SRC to block DST, where SRC and DST are
1868
   non-consecutive groups of registers, each represented by a PARALLEL.  */
1869
 
1870
void
1871
emit_group_move (rtx dst, rtx src)
1872
{
1873
  int i;
1874
 
1875
  gcc_assert (GET_CODE (src) == PARALLEL
1876
              && GET_CODE (dst) == PARALLEL
1877
              && XVECLEN (src, 0) == XVECLEN (dst, 0));
1878
 
1879
  /* Skip first entry if NULL.  */
1880
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1881
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1882
                    XEXP (XVECEXP (src, 0, i), 0));
1883
}
1884
 
1885
/* Move a group of registers represented by a PARALLEL into pseudos.  */
1886
 
1887
rtx
1888
emit_group_move_into_temps (rtx src)
1889
{
1890
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1891
  int i;
1892
 
1893
  for (i = 0; i < XVECLEN (src, 0); i++)
1894
    {
1895
      rtx e = XVECEXP (src, 0, i);
1896
      rtx d = XEXP (e, 0);
1897
 
1898
      if (d)
1899
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1900
      RTVEC_ELT (vec, i) = e;
1901
    }
1902
 
1903
  return gen_rtx_PARALLEL (GET_MODE (src), vec);
1904
}
1905
 
1906
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1907
   where SRC is non-consecutive registers represented by a PARALLEL.
1908
   SSIZE represents the total size of block ORIG_DST, or -1 if not
1909
   known.  */
1910
 
1911
void
1912
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1913
{
1914
  rtx *tmps, dst;
1915
  int start, finish, i;
1916
  enum machine_mode m = GET_MODE (orig_dst);
1917
 
1918
  gcc_assert (GET_CODE (src) == PARALLEL);
1919
 
1920
  if (!SCALAR_INT_MODE_P (m)
1921
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1922
    {
1923
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1924
      if (imode == BLKmode)
1925
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1926
      else
1927
        dst = gen_reg_rtx (imode);
1928
      emit_group_store (dst, src, type, ssize);
1929
      if (imode != BLKmode)
1930
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
1931
      emit_move_insn (orig_dst, dst);
1932
      return;
1933
    }
1934
 
1935
  /* Check for a NULL entry, used to indicate that the parameter goes
1936
     both on the stack and in registers.  */
1937
  if (XEXP (XVECEXP (src, 0, 0), 0))
1938
    start = 0;
1939
  else
1940
    start = 1;
1941
  finish = XVECLEN (src, 0);
1942
 
1943
  tmps = XALLOCAVEC (rtx, finish);
1944
 
1945
  /* Copy the (probable) hard regs into pseudos.  */
1946
  for (i = start; i < finish; i++)
1947
    {
1948
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1949
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1950
        {
1951
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
1952
          emit_move_insn (tmps[i], reg);
1953
        }
1954
      else
1955
        tmps[i] = reg;
1956
    }
1957
 
1958
  /* If we won't be storing directly into memory, protect the real destination
1959
     from strange tricks we might play.  */
1960
  dst = orig_dst;
1961
  if (GET_CODE (dst) == PARALLEL)
1962
    {
1963
      rtx temp;
1964
 
1965
      /* We can get a PARALLEL dst if there is a conditional expression in
1966
         a return statement.  In that case, the dst and src are the same,
1967
         so no action is necessary.  */
1968
      if (rtx_equal_p (dst, src))
1969
        return;
1970
 
1971
      /* It is unclear if we can ever reach here, but we may as well handle
1972
         it.  Allocate a temporary, and split this into a store/load to/from
1973
         the temporary.  */
1974
 
1975
      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1976
      emit_group_store (temp, src, type, ssize);
1977
      emit_group_load (dst, temp, type, ssize);
1978
      return;
1979
    }
1980
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1981
    {
1982
      enum machine_mode outer = GET_MODE (dst);
1983
      enum machine_mode inner;
1984
      HOST_WIDE_INT bytepos;
1985
      bool done = false;
1986
      rtx temp;
1987
 
1988
      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1989
        dst = gen_reg_rtx (outer);
1990
 
1991
      /* Make life a bit easier for combine.  */
1992
      /* If the first element of the vector is the low part
1993
         of the destination mode, use a paradoxical subreg to
1994
         initialize the destination.  */
1995
      if (start < finish)
1996
        {
1997
          inner = GET_MODE (tmps[start]);
1998
          bytepos = subreg_lowpart_offset (inner, outer);
1999
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2000
            {
2001
              temp = simplify_gen_subreg (outer, tmps[start],
2002
                                          inner, 0);
2003
              if (temp)
2004
                {
2005
                  emit_move_insn (dst, temp);
2006
                  done = true;
2007
                  start++;
2008
                }
2009
            }
2010
        }
2011
 
2012
      /* If the first element wasn't the low part, try the last.  */
2013
      if (!done
2014
          && start < finish - 1)
2015
        {
2016
          inner = GET_MODE (tmps[finish - 1]);
2017
          bytepos = subreg_lowpart_offset (inner, outer);
2018
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2019
            {
2020
              temp = simplify_gen_subreg (outer, tmps[finish - 1],
2021
                                          inner, 0);
2022
              if (temp)
2023
                {
2024
                  emit_move_insn (dst, temp);
2025
                  done = true;
2026
                  finish--;
2027
                }
2028
            }
2029
        }
2030
 
2031
      /* Otherwise, simply initialize the result to zero.  */
2032
      if (!done)
2033
        emit_move_insn (dst, CONST0_RTX (outer));
2034
    }
2035
 
2036
  /* Process the pieces.  */
2037
  for (i = start; i < finish; i++)
2038
    {
2039
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2040
      enum machine_mode mode = GET_MODE (tmps[i]);
2041
      unsigned int bytelen = GET_MODE_SIZE (mode);
2042
      unsigned int adj_bytelen = bytelen;
2043
      rtx dest = dst;
2044
 
2045
      /* Handle trailing fragments that run over the size of the struct.  */
2046
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2047
        adj_bytelen = ssize - bytepos;
2048
 
2049
      if (GET_CODE (dst) == CONCAT)
2050
        {
2051
          if (bytepos + adj_bytelen
2052
              <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2053
            dest = XEXP (dst, 0);
2054
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2055
            {
2056
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2057
              dest = XEXP (dst, 1);
2058
            }
2059
          else
2060
            {
2061
              enum machine_mode dest_mode = GET_MODE (dest);
2062
              enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2063
 
2064
              gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2065
 
2066
              if (GET_MODE_ALIGNMENT (dest_mode)
2067
                  >= GET_MODE_ALIGNMENT (tmp_mode))
2068
                {
2069
                  dest = assign_stack_temp (dest_mode,
2070
                                            GET_MODE_SIZE (dest_mode),
2071
                                            0);
2072
                  emit_move_insn (adjust_address (dest,
2073
                                                  tmp_mode,
2074
                                                  bytepos),
2075
                                  tmps[i]);
2076
                  dst = dest;
2077
                }
2078
              else
2079
                {
2080
                  dest = assign_stack_temp (tmp_mode,
2081
                                            GET_MODE_SIZE (tmp_mode),
2082
                                            0);
2083
                  emit_move_insn (dest, tmps[i]);
2084
                  dst = adjust_address (dest, dest_mode, bytepos);
2085
                }
2086
              break;
2087
            }
2088
        }
2089
 
2090
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2091
        {
2092
          /* store_bit_field always takes its value from the lsb.
2093
             Move the fragment to the lsb if it's not already there.  */
2094
          if (
2095
#ifdef BLOCK_REG_PADDING
2096
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2097
              == (BYTES_BIG_ENDIAN ? upward : downward)
2098
#else
2099
              BYTES_BIG_ENDIAN
2100
#endif
2101
              )
2102
            {
2103
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2104
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2105
                                      build_int_cst (NULL_TREE, shift),
2106
                                      tmps[i], 0);
2107
            }
2108
          bytelen = adj_bytelen;
2109
        }
2110
 
2111
      /* Optimize the access just a bit.  */
2112
      if (MEM_P (dest)
2113
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2114
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2115
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2116
          && bytelen == GET_MODE_SIZE (mode))
2117
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2118
      else
2119
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2120
                         mode, tmps[i]);
2121
    }
2122
 
2123
  /* Copy from the pseudo into the (probable) hard reg.  */
2124
  if (orig_dst != dst)
2125
    emit_move_insn (orig_dst, dst);
2126
}
2127
 
2128
/* Generate code to copy a BLKmode object of TYPE out of a
2129
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
2130
   is null, a stack temporary is created.  TGTBLK is returned.
2131
 
2132
   The purpose of this routine is to handle functions that return
2133
   BLKmode structures in registers.  Some machines (the PA for example)
2134
   want to return all small structures in registers regardless of the
2135
   structure's alignment.  */
2136
 
2137
rtx
2138
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2139
{
2140
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2141
  rtx src = NULL, dst = NULL;
2142
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2143
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2144
  enum machine_mode copy_mode;
2145
 
2146
  if (tgtblk == 0)
2147
    {
2148
      tgtblk = assign_temp (build_qualified_type (type,
2149
                                                  (TYPE_QUALS (type)
2150
                                                   | TYPE_QUAL_CONST)),
2151
                            0, 1, 1);
2152
      preserve_temp_slots (tgtblk);
2153
    }
2154
 
2155
  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2156
     into a new pseudo which is a full word.  */
2157
 
2158
  if (GET_MODE (srcreg) != BLKmode
2159
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2160
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2161
 
2162
  /* If the structure doesn't take up a whole number of words, see whether
2163
     SRCREG is padded on the left or on the right.  If it's on the left,
2164
     set PADDING_CORRECTION to the number of bits to skip.
2165
 
2166
     In most ABIs, the structure will be returned at the least significant end of
2167
     the register, which translates to right padding on little-endian
2168
     targets and left padding on big-endian targets.  The opposite
2169
     holds if the structure is returned at the most significant
2170
     end of the register.  */
2171
  if (bytes % UNITS_PER_WORD != 0
2172
      && (targetm.calls.return_in_msb (type)
2173
          ? !BYTES_BIG_ENDIAN
2174
          : BYTES_BIG_ENDIAN))
2175
    padding_correction
2176
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2177
 
2178
  /* Copy the structure BITSIZE bits at a time.  If the target lives in
2179
     memory, take care of not reading/writing past its end by selecting
2180
     a copy mode suited to BITSIZE.  This should always be possible given
2181
     how it is computed.
2182
 
2183
     We could probably emit more efficient code for machines which do not use
2184
     strict alignment, but it doesn't seem worth the effort at the current
2185
     time.  */
2186
 
2187
  copy_mode = word_mode;
2188
  if (MEM_P (tgtblk))
2189
    {
2190
      enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2191
      if (mem_mode != BLKmode)
2192
        copy_mode = mem_mode;
2193
    }
2194
 
2195
  for (bitpos = 0, xbitpos = padding_correction;
2196
       bitpos < bytes * BITS_PER_UNIT;
2197
       bitpos += bitsize, xbitpos += bitsize)
2198
    {
2199
      /* We need a new source operand each time xbitpos is on a
2200
         word boundary and when xbitpos == padding_correction
2201
         (the first time through).  */
2202
      if (xbitpos % BITS_PER_WORD == 0
2203
          || xbitpos == padding_correction)
2204
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2205
                                     GET_MODE (srcreg));
2206
 
2207
      /* We need a new destination operand each time bitpos is on
2208
         a word boundary.  */
2209
      if (bitpos % BITS_PER_WORD == 0)
2210
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2211
 
2212
      /* Use xbitpos for the source extraction (right justified) and
2213
         bitpos for the destination store (left justified).  */
2214
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2215
                       extract_bit_field (src, bitsize,
2216
                                          xbitpos % BITS_PER_WORD, 1,
2217
                                          NULL_RTX, copy_mode, copy_mode));
2218
    }
2219
 
2220
  return tgtblk;
2221
}
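/* Illustrative sketch (editorial addition, not part of the original source):
   spilling a BLKmode return value that arrived in SRCREG into a fresh stack
   temporary; passing a null TGTBLK lets the routine allocate the temporary
   itself.  SRCREG and RET_TYPE are assumed to exist in the caller.  */
#if 0
  rtx blk = copy_blkmode_from_reg (NULL_RTX, srcreg, ret_type);
#endif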
2222
 
2223
/* Add a USE expression for REG to the (possibly empty) list pointed
2224
   to by CALL_FUSAGE.  REG must denote a hard register.  */
2225
 
2226
void
2227
use_reg (rtx *call_fusage, rtx reg)
2228
{
2229
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2230
 
2231
  *call_fusage
2232
    = gen_rtx_EXPR_LIST (VOIDmode,
2233
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
2234
}
2235
 
2236
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2237
   starting at REGNO.  All of these registers must be hard registers.  */
2238
 
2239
void
2240
use_regs (rtx *call_fusage, int regno, int nregs)
2241
{
2242
  int i;
2243
 
2244
  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2245
 
2246
  for (i = 0; i < nregs; i++)
2247
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
2248
}
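/* Illustrative sketch (editorial addition, not part of the original source):
   a call expander noting that an argument occupies hard registers 3 and 4,
   so that the CALL_INSN's function-usage list mentions both.  The register
   number is a placeholder.  */
#if 0
  rtx call_fusage = NULL_RTX;
  use_regs (&call_fusage, 3, 2);
#endif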
2249
 
2250
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2251
   PARALLEL REGS.  This is for calls that pass values in multiple
2252
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2253
 
2254
void
2255
use_group_regs (rtx *call_fusage, rtx regs)
2256
{
2257
  int i;
2258
 
2259
  for (i = 0; i < XVECLEN (regs, 0); i++)
2260
    {
2261
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2262
 
2263
      /* A NULL entry means the parameter goes both on the stack and in
2264
         registers.  This can also be a MEM for targets that pass values
2265
         partially on the stack and partially in registers.  */
2266
      if (reg != 0 && REG_P (reg))
2267
        use_reg (call_fusage, reg);
2268
    }
2269
}
2270
 
2271
/* Return the defining gimple statement for SSA_NAME NAME if it is an
2272
   assignment and the code of the expression on the RHS is CODE.  Return
2273
   NULL otherwise.  */
2274
 
2275
static gimple
2276
get_def_for_expr (tree name, enum tree_code code)
2277
{
2278
  gimple def_stmt;
2279
 
2280
  if (TREE_CODE (name) != SSA_NAME)
2281
    return NULL;
2282
 
2283
  def_stmt = get_gimple_for_ssa_name (name);
2284
  if (!def_stmt
2285
      || gimple_assign_rhs_code (def_stmt) != code)
2286
    return NULL;
2287
 
2288
  return def_stmt;
2289
}
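/* Illustrative sketch (editorial addition, not part of the original source):
   asking whether NAME was defined by a multiplication, as an expander
   looking for a widening-multiply opportunity might do.  NAME is assumed
   to be an SSA_NAME operand seen during expansion.  */
#if 0
  gimple def = get_def_for_expr (name, MULT_EXPR);
  if (def != NULL)
    {
      tree op0 = gimple_assign_rhs1 (def);
      tree op1 = gimple_assign_rhs2 (def);
    }
#endif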
2290
 
2291
 
2292
/* Determine whether the LEN bytes generated by CONSTFUN can be
2293
   stored to memory using several move instructions.  CONSTFUNDATA is
2294
   a pointer which will be passed as argument in every CONSTFUN call.
2295
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
2296
   a memset operation and false if it's a copy of a constant string.
2297
   Return nonzero if a call to store_by_pieces should succeed.  */
2298
 
2299
int
2300
can_store_by_pieces (unsigned HOST_WIDE_INT len,
2301
                     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2302
                     void *constfundata, unsigned int align, bool memsetp)
2303
{
2304
  unsigned HOST_WIDE_INT l;
2305
  unsigned int max_size;
2306
  HOST_WIDE_INT offset = 0;
2307
  enum machine_mode mode, tmode;
2308
  enum insn_code icode;
2309
  int reverse;
2310
  rtx cst;
2311
 
2312
  if (len == 0)
2313
    return 1;
2314
 
2315
  if (! (memsetp
2316
         ? SET_BY_PIECES_P (len, align)
2317
         : STORE_BY_PIECES_P (len, align)))
2318
    return 0;
2319
 
2320
  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2321
  if (align >= GET_MODE_ALIGNMENT (tmode))
2322
    align = GET_MODE_ALIGNMENT (tmode);
2323
  else
2324
    {
2325
      enum machine_mode xmode;
2326
 
2327
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2328
           tmode != VOIDmode;
2329
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2330
        if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2331
            || SLOW_UNALIGNED_ACCESS (tmode, align))
2332
          break;
2333
 
2334
      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2335
    }
2336
 
2337
  /* We would first store what we can in the largest integer mode, then go to
2338
     successively smaller modes.  */
2339
 
2340
  for (reverse = 0;
2341
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2342
       reverse++)
2343
    {
2344
      l = len;
2345
      mode = VOIDmode;
2346
      max_size = STORE_MAX_PIECES + 1;
2347
      while (max_size > 1)
2348
        {
2349
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2350
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2351
            if (GET_MODE_SIZE (tmode) < max_size)
2352
              mode = tmode;
2353
 
2354
          if (mode == VOIDmode)
2355
            break;
2356
 
2357
          icode = optab_handler (mov_optab, mode)->insn_code;
2358
          if (icode != CODE_FOR_nothing
2359
              && align >= GET_MODE_ALIGNMENT (mode))
2360
            {
2361
              unsigned int size = GET_MODE_SIZE (mode);
2362
 
2363
              while (l >= size)
2364
                {
2365
                  if (reverse)
2366
                    offset -= size;
2367
 
2368
                  cst = (*constfun) (constfundata, offset, mode);
2369
                  if (!LEGITIMATE_CONSTANT_P (cst))
2370
                    return 0;
2371
 
2372
                  if (!reverse)
2373
                    offset += size;
2374
 
2375
                  l -= size;
2376
                }
2377
            }
2378
 
2379
          max_size = GET_MODE_SIZE (mode);
2380
        }
2381
 
2382
      /* The code above should have handled everything.  */
2383
      gcc_assert (!l);
2384
    }
2385
 
2386
  return 1;
2387
}
2388
 
2389
/* Generate several move instructions to store LEN bytes generated by
2390
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2391
   pointer which will be passed as argument in every CONSTFUN call.
2392
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
2393
   a memset operation and false if it's a copy of a constant string.
2394
   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2395
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2396
   stpcpy.  */
2397
 
2398
rtx
2399
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2400
                 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2401
                 void *constfundata, unsigned int align, bool memsetp, int endp)
2402
{
2403
  enum machine_mode to_addr_mode
2404
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2405
  struct store_by_pieces_d data;
2406
 
2407
  if (len == 0)
2408
    {
2409
      gcc_assert (endp != 2);
2410
      return to;
2411
    }
2412
 
2413
  gcc_assert (memsetp
2414
              ? SET_BY_PIECES_P (len, align)
2415
              : STORE_BY_PIECES_P (len, align));
2416
  data.constfun = constfun;
2417
  data.constfundata = constfundata;
2418
  data.len = len;
2419
  data.to = to;
2420
  store_by_pieces_1 (&data, align);
2421
  if (endp)
2422
    {
2423
      rtx to1;
2424
 
2425
      gcc_assert (!data.reverse);
2426
      if (data.autinc_to)
2427
        {
2428
          if (endp == 2)
2429
            {
2430
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2431
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2432
              else
2433
                data.to_addr = copy_to_mode_reg (to_addr_mode,
2434
                                                 plus_constant (data.to_addr,
2435
                                                                -1));
2436
            }
2437
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2438
                                           data.offset);
2439
        }
2440
      else
2441
        {
2442
          if (endp == 2)
2443
            --data.offset;
2444
          to1 = adjust_address (data.to, QImode, data.offset);
2445
        }
2446
      return to1;
2447
    }
2448
  else
2449
    return data.to;
2450
}
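/* Illustrative sketch (editorial addition, not part of the original source):
   a zero-fill by pieces can reuse the clear_by_pieces_1 callback defined
   below, guarding the emission with can_store_by_pieces first.  TO, LEN and
   ALIGN are assumed to come from the caller.  */
#if 0
  if (can_store_by_pieces (len, clear_by_pieces_1, NULL, align, true))
    store_by_pieces (to, len, clear_by_pieces_1, NULL, align, true, 0);
#endif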
2451
 
2452
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2453
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2454
 
2455
static void
2456
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2457
{
2458
  struct store_by_pieces_d data;
2459
 
2460
  if (len == 0)
2461
    return;
2462
 
2463
  data.constfun = clear_by_pieces_1;
2464
  data.constfundata = NULL;
2465
  data.len = len;
2466
  data.to = to;
2467
  store_by_pieces_1 (&data, align);
2468
}
2469
 
2470
/* Callback routine for clear_by_pieces.
2471
   Return const0_rtx unconditionally.  */
2472
 
2473
static rtx
2474
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2475
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2476
                   enum machine_mode mode ATTRIBUTE_UNUSED)
2477
{
2478
  return const0_rtx;
2479
}
2480
 
2481
/* Subroutine of clear_by_pieces and store_by_pieces.
2482
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
2483
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2484
 
2485
static void
2486
store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2487
                   unsigned int align ATTRIBUTE_UNUSED)
2488
{
2489
  enum machine_mode to_addr_mode
2490
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2491
  rtx to_addr = XEXP (data->to, 0);
2492
  unsigned int max_size = STORE_MAX_PIECES + 1;
2493
  enum machine_mode mode = VOIDmode, tmode;
2494
  enum insn_code icode;
2495
 
2496
  data->offset = 0;
2497
  data->to_addr = to_addr;
2498
  data->autinc_to
2499
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2500
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2501
 
2502
  data->explicit_inc_to = 0;
2503
  data->reverse
2504
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2505
  if (data->reverse)
2506
    data->offset = data->len;
2507
 
2508
  /* If storing requires more than two move insns,
2509
     copy addresses to registers (to make displacements shorter)
2510
     and use post-increment if available.  */
2511
  if (!data->autinc_to
2512
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2513
    {
2514
      /* Determine the main mode we'll be using.  */
2515
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2516
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2517
        if (GET_MODE_SIZE (tmode) < max_size)
2518
          mode = tmode;
2519
 
2520
      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2521
        {
2522
          data->to_addr = copy_to_mode_reg (to_addr_mode,
2523
                                            plus_constant (to_addr, data->len));
2524
          data->autinc_to = 1;
2525
          data->explicit_inc_to = -1;
2526
        }
2527
 
2528
      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2529
          && ! data->autinc_to)
2530
        {
2531
          data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2532
          data->autinc_to = 1;
2533
          data->explicit_inc_to = 1;
2534
        }
2535
 
2536
      if ( !data->autinc_to && CONSTANT_P (to_addr))
2537
        data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2538
    }
2539
 
2540
  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2541
  if (align >= GET_MODE_ALIGNMENT (tmode))
2542
    align = GET_MODE_ALIGNMENT (tmode);
2543
  else
2544
    {
2545
      enum machine_mode xmode;
2546
 
2547
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2548
           tmode != VOIDmode;
2549
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2550
        if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2551
            || SLOW_UNALIGNED_ACCESS (tmode, align))
2552
          break;
2553
 
2554
      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2555
    }
2556
 
2557
  /* First store what we can in the largest integer mode, then go to
2558
     successively smaller modes.  */
2559
 
2560
  while (max_size > 1)
2561
    {
2562
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2563
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2564
        if (GET_MODE_SIZE (tmode) < max_size)
2565
          mode = tmode;
2566
 
2567
      if (mode == VOIDmode)
2568
        break;
2569
 
2570
      icode = optab_handler (mov_optab, mode)->insn_code;
2571
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2572
        store_by_pieces_2 (GEN_FCN (icode), mode, data);
2573
 
2574
      max_size = GET_MODE_SIZE (mode);
2575
    }
2576
 
2577
  /* The code above should have handled everything.  */
2578
  gcc_assert (!data->len);
2579
}
2580
 
2581
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2582
   with move instructions for mode MODE.  GENFUN is the gen_... function
2583
   to make a move insn for that mode.  DATA has all the other info.  */
2584
 
2585
static void
2586
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2587
                   struct store_by_pieces_d *data)
2588
{
2589
  unsigned int size = GET_MODE_SIZE (mode);
2590
  rtx to1, cst;
2591
 
2592
  while (data->len >= size)
2593
    {
2594
      if (data->reverse)
2595
        data->offset -= size;
2596
 
2597
      if (data->autinc_to)
2598
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2599
                                         data->offset);
2600
      else
2601
        to1 = adjust_address (data->to, mode, data->offset);
2602
 
2603
      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2604
        emit_insn (gen_add2_insn (data->to_addr,
2605
                                  GEN_INT (-(HOST_WIDE_INT) size)));
2606
 
2607
      cst = (*data->constfun) (data->constfundata, data->offset, mode);
2608
      emit_insn ((*genfun) (to1, cst));
2609
 
2610
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2611
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2612
 
2613
      if (! data->reverse)
2614
        data->offset += size;
2615
 
2616
      data->len -= size;
2617
    }
2618
}
2619
 
2620
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2621
   its length in bytes.  */
2622
 
2623
rtx
2624
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2625
                     unsigned int expected_align, HOST_WIDE_INT expected_size)
2626
{
2627
  enum machine_mode mode = GET_MODE (object);
2628
  unsigned int align;
2629
 
2630
  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2631
 
2632
  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2633
     just move a zero.  Otherwise, do this a piece at a time.  */
2634
  if (mode != BLKmode
2635
      && CONST_INT_P (size)
2636
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2637
    {
2638
      rtx zero = CONST0_RTX (mode);
2639
      if (zero != NULL)
2640
        {
2641
          emit_move_insn (object, zero);
2642
          return NULL;
2643
        }
2644
 
2645
      if (COMPLEX_MODE_P (mode))
2646
        {
2647
          zero = CONST0_RTX (GET_MODE_INNER (mode));
2648
          if (zero != NULL)
2649
            {
2650
              write_complex_part (object, zero, 0);
2651
              write_complex_part (object, zero, 1);
2652
              return NULL;
2653
            }
2654
        }
2655
    }
2656
 
2657
  if (size == const0_rtx)
2658
    return NULL;
2659
 
2660
  align = MEM_ALIGN (object);
2661
 
2662
  if (CONST_INT_P (size)
2663
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
2664
    clear_by_pieces (object, INTVAL (size), align);
2665
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
2666
                                   expected_align, expected_size))
2667
    ;
2668
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2669
    return set_storage_via_libcall (object, size, const0_rtx,
2670
                                    method == BLOCK_OP_TAILCALL);
2671
  else
2672
    gcc_unreachable ();
2673
 
2674
  return NULL;
2675
}
2676
 
2677
rtx
2678
clear_storage (rtx object, rtx size, enum block_op_methods method)
2679
{
2680
  return clear_storage_hints (object, size, method, 0, -1);
2681
}
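/* Illustrative sketch (editorial addition, not part of the original source):
   zeroing a 32-byte stack temporary through the normal entry point;
   clear_storage_hints then picks between clear-by-pieces, a setmem pattern
   and the memset libcall.  */
#if 0
  rtx obj = assign_stack_temp (BLKmode, 32, 0);
  clear_storage (obj, GEN_INT (32), BLOCK_OP_NORMAL);
#endif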
2682
 
2683
 
2684
/* A subroutine of clear_storage.  Expand a call to memset.
2685
   Return the return value of memset, 0 otherwise.  */
2686
 
2687
rtx
2688
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2689
{
2690
  tree call_expr, fn, object_tree, size_tree, val_tree;
2691
  enum machine_mode size_mode;
2692
  rtx retval;
2693
 
2694
  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
2695
     place those new pseudos into a VAR_DECL and use them later.  */
2696
 
2697
  object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2698
 
2699
  size_mode = TYPE_MODE (sizetype);
2700
  size = convert_to_mode (size_mode, size, 1);
2701
  size = copy_to_mode_reg (size_mode, size);
2702
 
2703
  /* It is incorrect to use the libcall calling conventions to call
2704
     memset in this context.  This could be a user call to memset and
2705
     the user may wish to examine the return value from memset.  For
2706
     targets where libcalls and normal calls have different conventions
2707
     for returning pointers, we could end up generating incorrect code.  */
2708
 
2709
  object_tree = make_tree (ptr_type_node, object);
2710
  if (!CONST_INT_P (val))
2711
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2712
  size_tree = make_tree (sizetype, size);
2713
  val_tree = make_tree (integer_type_node, val);
2714
 
2715
  fn = clear_storage_libcall_fn (true);
2716
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2717
  CALL_EXPR_TAILCALL (call_expr) = tailcall;
2718
 
2719
  retval = expand_normal (call_expr);
2720
 
2721
  return retval;
2722
}
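/* Illustrative sketch (editorial addition, not part of the original source):
   filling OBJECT with a nonzero byte via the memset libcall, which
   clear_storage itself never does (it always passes const0_rtx).  OBJECT is
   an addressable BLKmode MEM and NBYTES its size rtx, both assumed to exist
   in the caller.  */
#if 0
  set_storage_via_libcall (object, nbytes, GEN_INT (0xff), false);
#endif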
2723
 
2724
/* A subroutine of set_storage_via_libcall.  Create the tree node
2725
   for the function we use for block clears.  The first time FOR_CALL
2726
   is true, we call assemble_external.  */
2727
 
2728
tree block_clear_fn;
2729
 
2730
void
2731
init_block_clear_fn (const char *asmspec)
2732
{
2733
  if (!block_clear_fn)
2734
    {
2735
      tree fn, args;
2736
 
2737
      fn = get_identifier ("memset");
2738
      args = build_function_type_list (ptr_type_node, ptr_type_node,
2739
                                       integer_type_node, sizetype,
2740
                                       NULL_TREE);
2741
 
2742
      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2743
      DECL_EXTERNAL (fn) = 1;
2744
      TREE_PUBLIC (fn) = 1;
2745
      DECL_ARTIFICIAL (fn) = 1;
2746
      TREE_NOTHROW (fn) = 1;
2747
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2748
      DECL_VISIBILITY_SPECIFIED (fn) = 1;
2749
 
2750
      block_clear_fn = fn;
2751
    }
2752
 
2753
  if (asmspec)
2754
    set_user_assembler_name (block_clear_fn, asmspec);
2755
}
2756
 
2757
static tree
2758
clear_storage_libcall_fn (int for_call)
2759
{
2760
  static bool emitted_extern;
2761
 
2762
  if (!block_clear_fn)
2763
    init_block_clear_fn (NULL);
2764
 
2765
  if (for_call && !emitted_extern)
2766
    {
2767
      emitted_extern = true;
2768
      make_decl_rtl (block_clear_fn);
2769
      assemble_external (block_clear_fn);
2770
    }
2771
 
2772
  return block_clear_fn;
2773
}
2774
 
2775
/* Expand a setmem pattern; return true if successful.  */
2776
 
2777
bool
2778
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2779
                        unsigned int expected_align, HOST_WIDE_INT expected_size)
2780
{
2781
  /* Try the most limited insn first, because there's no point
2782
     including more than one in the machine description unless
2783
     the more limited one has some advantage.  */
2784
 
2785
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2786
  enum machine_mode mode;
2787
 
2788
  if (expected_align < align)
2789
    expected_align = align;
2790
 
2791
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2792
       mode = GET_MODE_WIDER_MODE (mode))
2793
    {
2794
      enum insn_code code = setmem_optab[(int) mode];
2795
      insn_operand_predicate_fn pred;
2796
 
2797
      if (code != CODE_FOR_nothing
2798
          /* We don't need MODE to be narrower than
2799
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2800
             the mode mask, as it is returned by the macro, it will
2801
             definitely be less than the actual mode mask.  */
2802
          && ((CONST_INT_P (size)
2803
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
2804
                   <= (GET_MODE_MASK (mode) >> 1)))
2805
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2806
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2807
              || (*pred) (object, BLKmode))
2808
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2809
              || (*pred) (opalign, VOIDmode)))
2810
        {
2811
          rtx opsize, opchar;
2812
          enum machine_mode char_mode;
2813
          rtx last = get_last_insn ();
2814
          rtx pat;
2815
 
2816
          opsize = convert_to_mode (mode, size, 1);
2817
          pred = insn_data[(int) code].operand[1].predicate;
2818
          if (pred != 0 && ! (*pred) (opsize, mode))
2819
            opsize = copy_to_mode_reg (mode, opsize);
2820
 
2821
          opchar = val;
2822
          char_mode = insn_data[(int) code].operand[2].mode;
2823
          if (char_mode != VOIDmode)
2824
            {
2825
              opchar = convert_to_mode (char_mode, opchar, 1);
2826
              pred = insn_data[(int) code].operand[2].predicate;
2827
              if (pred != 0 && ! (*pred) (opchar, char_mode))
2828
                opchar = copy_to_mode_reg (char_mode, opchar);
2829
            }
2830
 
2831
          if (insn_data[(int) code].n_operands == 4)
2832
            pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2833
          else
2834
            pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2835
                                        GEN_INT (expected_align
2836
                                                 / BITS_PER_UNIT),
2837
                                        GEN_INT (expected_size));
2838
          if (pat)
2839
            {
2840
              emit_insn (pat);
2841
              return true;
2842
            }
2843
          else
2844
            delete_insns_since (last);
2845
        }
2846
    }
2847
 
2848
  return false;
2849
}
2850
 
2851
 
2852
/* Write to one of the components of the complex value CPLX.  Write VAL to
2853
   the real part if IMAG_P is false, and the imaginary part if it's true.  */
2854
 
2855
static void
2856
write_complex_part (rtx cplx, rtx val, bool imag_p)
2857
{
2858
  enum machine_mode cmode;
2859
  enum machine_mode imode;
2860
  unsigned ibitsize;
2861
 
2862
  if (GET_CODE (cplx) == CONCAT)
2863
    {
2864
      emit_move_insn (XEXP (cplx, imag_p), val);
2865
      return;
2866
    }
2867
 
2868
  cmode = GET_MODE (cplx);
2869
  imode = GET_MODE_INNER (cmode);
2870
  ibitsize = GET_MODE_BITSIZE (imode);
2871
 
2872
  /* For MEMs simplify_gen_subreg may generate an invalid new address
2873
     because, e.g., the original address is considered mode-dependent
2874
     by the target, which restricts simplify_subreg from invoking
2875
     adjust_address_nv.  Instead of preparing fallback support for an
2876
     invalid address, we call adjust_address_nv directly.  */
2877
  if (MEM_P (cplx))
2878
    {
2879
      emit_move_insn (adjust_address_nv (cplx, imode,
2880
                                         imag_p ? GET_MODE_SIZE (imode) : 0),
2881
                      val);
2882
      return;
2883
    }
2884
 
2885
  /* If the sub-object is at least word sized, then we know that subregging
2886
     will work.  This special case is important, since store_bit_field
2887
     wants to operate on integer modes, and there's rarely an OImode to
2888
     correspond to TCmode.  */
2889
  if (ibitsize >= BITS_PER_WORD
2890
      /* For hard regs we have exact predicates.  Assume we can split
2891
         the original object if it spans an even number of hard regs.
2892
         This special case is important for SCmode on 64-bit platforms
2893
         where the natural size of floating-point regs is 32-bit.  */
2894
      || (REG_P (cplx)
2895
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2896
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2897
    {
2898
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
2899
                                      imag_p ? GET_MODE_SIZE (imode) : 0);
2900
      if (part)
2901
        {
2902
          emit_move_insn (part, val);
2903
          return;
2904
        }
2905
      else
2906
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
2907
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2908
    }
2909
 
2910
  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2911
}
2912
 
2913
/* Extract one of the components of the complex value CPLX.  Extract the
2914
   real part if IMAG_P is false, and the imaginary part if it's true.  */
2915
 
2916
static rtx
2917
read_complex_part (rtx cplx, bool imag_p)
2918
{
2919
  enum machine_mode cmode, imode;
2920
  unsigned ibitsize;
2921
 
2922
  if (GET_CODE (cplx) == CONCAT)
2923
    return XEXP (cplx, imag_p);
2924
 
2925
  cmode = GET_MODE (cplx);
2926
  imode = GET_MODE_INNER (cmode);
2927
  ibitsize = GET_MODE_BITSIZE (imode);
2928
 
2929
  /* Special case reads from complex constants that got spilled to memory.  */
2930
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2931
    {
2932
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2933
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
2934
        {
2935
          tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2936
          if (CONSTANT_CLASS_P (part))
2937
            return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2938
        }
2939
    }
2940
 
2941
  /* For MEMs simplify_gen_subreg may generate an invalid new address
2942
     because, e.g., the original address is considered mode-dependent
2943
     by the target, which restricts simplify_subreg from invoking
2944
     adjust_address_nv.  Instead of preparing fallback support for an
2945
     invalid address, we call adjust_address_nv directly.  */
2946
  if (MEM_P (cplx))
2947
    return adjust_address_nv (cplx, imode,
2948
                              imag_p ? GET_MODE_SIZE (imode) : 0);
2949
 
2950
  /* If the sub-object is at least word sized, then we know that subregging
2951
     will work.  This special case is important, since extract_bit_field
2952
     wants to operate on integer modes, and there's rarely an OImode to
2953
     correspond to TCmode.  */
2954
  if (ibitsize >= BITS_PER_WORD
2955
      /* For hard regs we have exact predicates.  Assume we can split
2956
         the original object if it spans an even number of hard regs.
2957
         This special case is important for SCmode on 64-bit platforms
2958
         where the natural size of floating-point regs is 32-bit.  */
2959
      || (REG_P (cplx)
2960
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2961
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2962
    {
2963
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2964
                                     imag_p ? GET_MODE_SIZE (imode) : 0);
2965
      if (ret)
2966
        return ret;
2967
      else
2968
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
2969
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2970
    }
2971
 
2972
  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2973
                            true, NULL_RTX, imode, imode);
2974
}
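/* Illustrative sketch (editorial addition, not part of the original source):
   swapping the two halves of a complex value using the two helpers above.
   CPLX is assumed to be an existing complex-mode rtx from the caller.  */
#if 0
  rtx re = read_complex_part (cplx, false);
  rtx im = read_complex_part (cplx, true);
  rtx target = gen_reg_rtx (GET_MODE (cplx));
  write_complex_part (target, im, false);
  write_complex_part (target, re, true);
#endif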
2975
 
2976
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
2977
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
2978
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
2979
   we'll force-create a SUBREG if needed.  */
2980
 
2981
static rtx
2982
emit_move_change_mode (enum machine_mode new_mode,
2983
                       enum machine_mode old_mode, rtx x, bool force)
2984
{
2985
  rtx ret;
2986
 
2987
  if (push_operand (x, GET_MODE (x)))
2988
    {
2989
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2990
      MEM_COPY_ATTRIBUTES (ret, x);
2991
    }
2992
  else if (MEM_P (x))
2993
    {
2994
      /* We don't have to worry about changing the address since the
2995
         size in bytes is supposed to be the same.  */
2996
      if (reload_in_progress)
2997
        {
2998
          /* Copy the MEM to change the mode and move any
2999
             substitutions from the old MEM to the new one.  */
3000
          ret = adjust_address_nv (x, new_mode, 0);
3001
          copy_replacements (x, ret);
3002
        }
3003
      else
3004
        ret = adjust_address (x, new_mode, 0);
3005
    }
3006
  else
3007
    {
3008
      /* Note that we do want simplify_subreg's behavior of validating
3009
         that the new mode is ok for a hard register.  If we were to use
3010
         simplify_gen_subreg, we would create the subreg, but would
3011
         probably run into the target not being able to implement it.  */
3012
      /* Except, of course, when FORCE is true, in which case this is exactly
3013
         what we want; that is needed for CCmodes on some targets.  */
3014
      if (force)
3015
        ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3016
      else
3017
        ret = simplify_subreg (new_mode, x, old_mode, 0);
3018
    }
3019
 
3020
  return ret;
3021
}
3022
 
3023
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
3024
   an integer mode of the same size as MODE.  Returns the instruction
3025
   emitted, or NULL if such a move could not be generated.  */
3026
 
3027
static rtx
3028
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3029
{
3030
  enum machine_mode imode;
3031
  enum insn_code code;
3032
 
3033
  /* There must exist a mode of the exact size we require.  */
3034
  imode = int_mode_for_mode (mode);
3035
  if (imode == BLKmode)
3036
    return NULL_RTX;
3037
 
3038
  /* The target must support moves in this mode.  */
3039
  code = optab_handler (mov_optab, imode)->insn_code;
3040
  if (code == CODE_FOR_nothing)
3041
    return NULL_RTX;
3042
 
3043
  x = emit_move_change_mode (imode, mode, x, force);
3044
  if (x == NULL_RTX)
3045
    return NULL_RTX;
3046
  y = emit_move_change_mode (imode, mode, y, force);
3047
  if (y == NULL_RTX)
3048
    return NULL_RTX;
3049
  return emit_insn (GEN_FCN (code) (x, y));
3050
}
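/* Illustrative sketch (hypothetical): on a target that has a DImode move
   pattern but no DFmode one, a DFmode register copy can be rewritten as a
   copy of the same bits in DImode:

       rtx a = gen_reg_rtx (DFmode), b = gen_reg_rtx (DFmode);
       emit_move_via_integer (DFmode, a, b, true);
       -- emits the equivalent of (set (subreg:DI a 0) (subreg:DI b 0))

   If int_mode_for_mode cannot supply an integer mode of the right size, or
   the target lacks the integer move, NULL_RTX is returned and the caller
   falls back to other strategies such as emit_move_multi_word.  */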
3051
 
3052
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
3053
   Return an equivalent MEM that does not use an auto-increment.  */
3054
 
3055
static rtx
3056
emit_move_resolve_push (enum machine_mode mode, rtx x)
3057
{
3058
  enum rtx_code code = GET_CODE (XEXP (x, 0));
3059
  HOST_WIDE_INT adjust;
3060
  rtx temp;
3061
 
3062
  adjust = GET_MODE_SIZE (mode);
3063
#ifdef PUSH_ROUNDING
3064
  adjust = PUSH_ROUNDING (adjust);
3065
#endif
3066
  if (code == PRE_DEC || code == POST_DEC)
3067
    adjust = -adjust;
3068
  else if (code == PRE_MODIFY || code == POST_MODIFY)
3069
    {
3070
      rtx expr = XEXP (XEXP (x, 0), 1);
3071
      HOST_WIDE_INT val;
3072
 
3073
      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3074
      gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3075
      val = INTVAL (XEXP (expr, 1));
3076
      if (GET_CODE (expr) == MINUS)
3077
        val = -val;
3078
      gcc_assert (adjust == val || adjust == -val);
3079
      adjust = val;
3080
    }
3081
 
3082
  /* Do not use anti_adjust_stack, since we don't want to update
3083
     stack_pointer_delta.  */
3084
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3085
                              GEN_INT (adjust), stack_pointer_rtx,
3086
                              0, OPTAB_LIB_WIDEN);
3087
  if (temp != stack_pointer_rtx)
3088
    emit_move_insn (stack_pointer_rtx, temp);
3089
 
3090
  switch (code)
3091
    {
3092
    case PRE_INC:
3093
    case PRE_DEC:
3094
    case PRE_MODIFY:
3095
      temp = stack_pointer_rtx;
3096
      break;
3097
    case POST_INC:
3098
    case POST_DEC:
3099
    case POST_MODIFY:
3100
      temp = plus_constant (stack_pointer_rtx, -adjust);
3101
      break;
3102
    default:
3103
      gcc_unreachable ();
3104
    }
3105
 
3106
  return replace_equiv_address (x, temp);
3107
}
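/* Illustrative sketch (hypothetical, STACK_GROWS_DOWNWARD target): a DImode
   push destination such as (mem:DI (pre_dec (reg sp))) is resolved by
   emitting the stack adjustment explicitly and returning a plain MEM:

       x = emit_move_resolve_push (DImode, x);
       -- emits  sp = sp - 8  (rounded by PUSH_ROUNDING where defined)
       -- and yields the equivalent of (mem:DI (reg sp))

   For the POST_ variants the returned address compensates for the
   adjustment that has already been applied.  */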
3108
 
3109
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
3110
   X is known to satisfy push_operand, and MODE is known to be complex.
3111
   Returns the last instruction emitted.  */
3112
 
3113
rtx
3114
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3115
{
3116
  enum machine_mode submode = GET_MODE_INNER (mode);
3117
  bool imag_first;
3118
 
3119
#ifdef PUSH_ROUNDING
3120
  unsigned int submodesize = GET_MODE_SIZE (submode);
3121
 
3122
  /* If we are writing to the stack but the part's size is not one the
3122
     machine can push exactly, we must use ordinary move instructions.  */
3124
  if (PUSH_ROUNDING (submodesize) != submodesize)
3125
    {
3126
      x = emit_move_resolve_push (mode, x);
3127
      return emit_move_insn (x, y);
3128
    }
3129
#endif
3130
 
3131
  /* Note that the real part always precedes the imag part in memory
3132
     regardless of machine's endianness.  */
3133
  switch (GET_CODE (XEXP (x, 0)))
3134
    {
3135
    case PRE_DEC:
3136
    case POST_DEC:
3137
      imag_first = true;
3138
      break;
3139
    case PRE_INC:
3140
    case POST_INC:
3141
      imag_first = false;
3142
      break;
3143
    default:
3144
      gcc_unreachable ();
3145
    }
3146
 
3147
  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3148
                  read_complex_part (y, imag_first));
3149
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3150
                         read_complex_part (y, !imag_first));
3151
}
3152
 
3153
/* A subroutine of emit_move_complex.  Perform the move from Y to X
3154
   via two moves of the parts.  Returns the last instruction emitted.  */
3155
 
3156
rtx
3157
emit_move_complex_parts (rtx x, rtx y)
3158
{
3159
  /* Show the output dies here.  This is necessary for SUBREGs
3160
     of pseudos since we cannot track their lifetimes correctly;
3161
     hard regs shouldn't appear here except as return values.  */
3162
  if (!reload_completed && !reload_in_progress
3163
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3164
    emit_clobber (x);
3165
 
3166
  write_complex_part (x, read_complex_part (y, false), false);
3167
  write_complex_part (x, read_complex_part (y, true), true);
3168
 
3169
  return get_last_insn ();
3170
}
3171
 
3172
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3173
   MODE is known to be complex.  Returns the last instruction emitted.  */
3174
 
3175
static rtx
3176
emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3177
{
3178
  bool try_int;
3179
 
3180
  /* Need to take special care for pushes, to maintain proper ordering
3181
     of the data, and possibly extra padding.  */
3182
  if (push_operand (x, mode))
3183
    return emit_move_complex_push (mode, x, y);
3184
 
3185
  /* See if we can coerce the target into moving both values at once.  */
3186
 
3187
  /* Move floating point as parts.  */
3188
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3189
      && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3190
    try_int = false;
3191
  /* Not possible if the values are inherently not adjacent.  */
3192
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3193
    try_int = false;
3194
  /* Is possible if both are registers (or subregs of registers).  */
3195
  else if (register_operand (x, mode) && register_operand (y, mode))
3196
    try_int = true;
3197
  /* If one of the operands is a memory, and alignment constraints
3198
     are friendly enough, we may be able to do combined memory operations.
3199
     We do not attempt this if Y is a constant because that combination is
3200
     usually better with the by-parts thing below.  */
3201
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3202
           && (!STRICT_ALIGNMENT
3203
               || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3204
    try_int = true;
3205
  else
3206
    try_int = false;
3207
 
3208
  if (try_int)
3209
    {
3210
      rtx ret;
3211
 
3212
      /* For memory to memory moves, optimal behavior can be had with the
3213
         existing block move logic.  */
3214
      if (MEM_P (x) && MEM_P (y))
3215
        {
3216
          emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3217
                           BLOCK_OP_NO_LIBCALL);
3218
          return get_last_insn ();
3219
        }
3220
 
3221
      ret = emit_move_via_integer (mode, x, y, true);
3222
      if (ret)
3223
        return ret;
3224
    }
3225
 
3226
  return emit_move_complex_parts (x, y);
3227
}
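/* Illustrative sketch (hypothetical): for a complex-integer copy between
   two pseudos, say

       rtx a = gen_reg_rtx (CSImode), b = gen_reg_rtx (CSImode);
       emit_move_complex (CSImode, a, b);

   try_int is true and the copy becomes a single DImode move (assuming a
   movdi pattern exists).  An SCmode copy, being complex float with an
   SFmode move pattern normally available, is instead split by
   emit_move_complex_parts, and MEM-to-MEM copies use emit_block_move.  */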
3228
 
3229
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3230
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */
3231
 
3232
static rtx
3233
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3234
{
3235
  rtx ret;
3236
 
3237
  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
3238
  if (mode != CCmode)
3239
    {
3240
      enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3241
      if (code != CODE_FOR_nothing)
3242
        {
3243
          x = emit_move_change_mode (CCmode, mode, x, true);
3244
          y = emit_move_change_mode (CCmode, mode, y, true);
3245
          return emit_insn (GEN_FCN (code) (x, y));
3246
        }
3247
    }
3248
 
3249
  /* Otherwise, find the MODE_INT mode of the same width.  */
3250
  ret = emit_move_via_integer (mode, x, y, false);
3251
  gcc_assert (ret != NULL);
3252
  return ret;
3253
}
3254
 
3255
/* Return true if word I of OP lies entirely in the
3256
   undefined bits of a paradoxical subreg.  */
3257
 
3258
static bool
3259
undefined_operand_subword_p (const_rtx op, int i)
3260
{
3261
  enum machine_mode innermode, innermostmode;
3262
  int offset;
3263
  if (GET_CODE (op) != SUBREG)
3264
    return false;
3265
  innermode = GET_MODE (op);
3266
  innermostmode = GET_MODE (SUBREG_REG (op));
3267
  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3268
  /* The SUBREG_BYTE represents offset, as if the value were stored in
3269
     memory, except for a paradoxical subreg where we define
3270
     SUBREG_BYTE to be 0; undo this exception as in
3271
     simplify_subreg.  */
3272
  if (SUBREG_BYTE (op) == 0
3273
      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3274
    {
3275
      int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3276
      if (WORDS_BIG_ENDIAN)
3277
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3278
      if (BYTES_BIG_ENDIAN)
3279
        offset += difference % UNITS_PER_WORD;
3280
    }
3281
  if (offset >= GET_MODE_SIZE (innermostmode)
3282
      || offset <= -GET_MODE_SIZE (word_mode))
3283
    return true;
3284
  return false;
3285
}
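/* Illustrative sketch (hypothetical, 32-bit little-endian target): for the
   paradoxical subreg  op = (subreg:DI (reg:SI r) 0)  word 1 lies entirely
   in the undefined upper half, so

       undefined_operand_subword_p (op, 0)   -- false
       undefined_operand_subword_p (op, 1)   -- true

   and emit_move_multi_word below can skip emitting a move for word 1.  */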
3286
 
3287
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3288
   MODE is any multi-word or full-word mode that lacks a move_insn
3289
   pattern.  Note that you will get better code if you define such
3290
   patterns, even if they must turn into multiple assembler instructions.  */
3291
 
3292
static rtx
3293
emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3294
{
3295
  rtx last_insn = 0;
3296
  rtx seq, inner;
3297
  bool need_clobber;
3298
  int i;
3299
 
3300
  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3301
 
3302
  /* If X is a push on the stack, do the push now and replace
3303
     X with a reference to the stack pointer.  */
3304
  if (push_operand (x, mode))
3305
    x = emit_move_resolve_push (mode, x);
3306
 
3307
  /* If we are in reload, see if either operand is a MEM whose address
3308
     is scheduled for replacement.  */
3309
  if (reload_in_progress && MEM_P (x)
3310
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3311
    x = replace_equiv_address_nv (x, inner);
3312
  if (reload_in_progress && MEM_P (y)
3313
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3314
    y = replace_equiv_address_nv (y, inner);
3315
 
3316
  start_sequence ();
3317
 
3318
  need_clobber = false;
3319
  for (i = 0;
3320
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3321
       i++)
3322
    {
3323
      rtx xpart = operand_subword (x, i, 1, mode);
3324
      rtx ypart;
3325
 
3326
      /* Do not generate code for a move if it would come entirely
3327
         from the undefined bits of a paradoxical subreg.  */
3328
      if (undefined_operand_subword_p (y, i))
3329
        continue;
3330
 
3331
      ypart = operand_subword (y, i, 1, mode);
3332
 
3333
      /* If we can't get a part of Y, put Y into memory if it is a
3334
         constant.  Otherwise, force it into a register.  Then we must
3335
         be able to get a part of Y.  */
3336
      if (ypart == 0 && CONSTANT_P (y))
3337
        {
3338
          y = use_anchored_address (force_const_mem (mode, y));
3339
          ypart = operand_subword (y, i, 1, mode);
3340
        }
3341
      else if (ypart == 0)
3342
        ypart = operand_subword_force (y, i, mode);
3343
 
3344
      gcc_assert (xpart && ypart);
3345
 
3346
      need_clobber |= (GET_CODE (xpart) == SUBREG);
3347
 
3348
      last_insn = emit_move_insn (xpart, ypart);
3349
    }
3350
 
3351
  seq = get_insns ();
3352
  end_sequence ();
3353
 
3354
  /* Show the output dies here.  This is necessary for SUBREGs
3355
     of pseudos since we cannot track their lifetimes correctly;
3356
     hard regs shouldn't appear here except as return values.
3357
     We never want to emit such a clobber after reload.  */
3358
  if (x != y
3359
      && ! (reload_in_progress || reload_completed)
3360
      && need_clobber != 0)
3361
    emit_clobber (x);
3362
 
3363
  emit_insn (seq);
3364
 
3365
  return last_insn;
3366
}
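/* Illustrative sketch (hypothetical, 32-bit target with no movdi pattern):
   a DImode store of a pseudo to memory,

       emit_move_multi_word (DImode, mem, reg);

   becomes two SImode moves of operand_subword 0 and 1, emitted as one
   sequence; a (clobber ...) of the destination is emitted first when some
   destination word is a SUBREG, so dataflow does not see a partial def.  */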
3367
 
3368
/* Low level part of emit_move_insn.
3369
   Called just like emit_move_insn, but assumes X and Y
3370
   are basically valid.  */
3371
 
3372
rtx
3373
emit_move_insn_1 (rtx x, rtx y)
3374
{
3375
  enum machine_mode mode = GET_MODE (x);
3376
  enum insn_code code;
3377
 
3378
  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3379
 
3380
  code = optab_handler (mov_optab, mode)->insn_code;
3381
  if (code != CODE_FOR_nothing)
3382
    return emit_insn (GEN_FCN (code) (x, y));
3383
 
3384
  /* Expand complex moves by moving real part and imag part.  */
3385
  if (COMPLEX_MODE_P (mode))
3386
    return emit_move_complex (mode, x, y);
3387
 
3388
  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3389
      || ALL_FIXED_POINT_MODE_P (mode))
3390
    {
3391
      rtx result = emit_move_via_integer (mode, x, y, true);
3392
 
3393
      /* If we can't find an integer mode, use multi words.  */
3394
      if (result)
3395
        return result;
3396
      else
3397
        return emit_move_multi_word (mode, x, y);
3398
    }
3399
 
3400
  if (GET_MODE_CLASS (mode) == MODE_CC)
3401
    return emit_move_ccmode (mode, x, y);
3402
 
3403
  /* Try using a move pattern for the corresponding integer mode.  This is
3404
     only safe when simplify_subreg can convert MODE constants into integer
3405
     constants.  At present, it can only do this reliably if the value
3406
     fits within a HOST_WIDE_INT.  */
3407
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3408
    {
3409
      rtx ret = emit_move_via_integer (mode, x, y, false);
3410
      if (ret)
3411
        return ret;
3412
    }
3413
 
3414
  return emit_move_multi_word (mode, x, y);
3415
}
3416
 
3417
/* Generate code to copy Y into X.
3418
   Both Y and X must have the same mode, except that
3419
   Y can be a constant with VOIDmode.
3420
   This mode cannot be BLKmode; use emit_block_move for that.
3421
 
3422
   Return the last instruction emitted.  */
3423
 
3424
rtx
3425
emit_move_insn (rtx x, rtx y)
3426
{
3427
  enum machine_mode mode = GET_MODE (x);
3428
  rtx y_cst = NULL_RTX;
3429
  rtx last_insn, set;
3430
 
3431
  gcc_assert (mode != BLKmode
3432
              && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3433
 
3434
  if (CONSTANT_P (y))
3435
    {
3436
      if (optimize
3437
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3438
          && (last_insn = compress_float_constant (x, y)))
3439
        return last_insn;
3440
 
3441
      y_cst = y;
3442
 
3443
      if (!LEGITIMATE_CONSTANT_P (y))
3444
        {
3445
          y = force_const_mem (mode, y);
3446
 
3447
          /* If the target's cannot_force_const_mem prevented the spill,
3448
             assume that the target's move expanders will also take care
3449
             of the non-legitimate constant.  */
3450
          if (!y)
3451
            y = y_cst;
3452
          else
3453
            y = use_anchored_address (y);
3454
        }
3455
    }
3456
 
3457
  /* If X or Y are memory references, verify that their addresses are valid
3458
     for the machine.  */
3459
  if (MEM_P (x)
3460
      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3461
                                         MEM_ADDR_SPACE (x))
3462
          && ! push_operand (x, GET_MODE (x))))
3463
    x = validize_mem (x);
3464
 
3465
  if (MEM_P (y)
3466
      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3467
                                        MEM_ADDR_SPACE (y)))
3468
    y = validize_mem (y);
3469
 
3470
  gcc_assert (mode != BLKmode);
3471
 
3472
  last_insn = emit_move_insn_1 (x, y);
3473
 
3474
  if (y_cst && REG_P (x)
3475
      && (set = single_set (last_insn)) != NULL_RTX
3476
      && SET_DEST (set) == x
3477
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
3478
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3479
 
3480
  return last_insn;
3481
}
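/* Illustrative sketch (hypothetical): typical uses from expanders are

       emit_move_insn (target, const0_rtx);
       emit_move_insn (gen_reg_rtx (SImode), gen_int_mode (42, SImode));

   where TARGET stands for some previously obtained destination rtx.  A
   constant source that fails LEGITIMATE_CONSTANT_P is spilled to the
   constant pool first, and when the destination is a register a REG_EQUAL
   note recording the original constant is attached to the move.  */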
3482
 
3483
/* If Y is representable exactly in a narrower mode, and the target can
3484
   perform the extension directly from constant or memory, then emit the
3485
   move as an extension.  */
3486
 
3487
static rtx
3488
compress_float_constant (rtx x, rtx y)
3489
{
3490
  enum machine_mode dstmode = GET_MODE (x);
3491
  enum machine_mode orig_srcmode = GET_MODE (y);
3492
  enum machine_mode srcmode;
3493
  REAL_VALUE_TYPE r;
3494
  int oldcost, newcost;
3495
  bool speed = optimize_insn_for_speed_p ();
3496
 
3497
  REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3498
 
3499
  if (LEGITIMATE_CONSTANT_P (y))
3500
    oldcost = rtx_cost (y, SET, speed);
3501
  else
3502
    oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3503
 
3504
  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3505
       srcmode != orig_srcmode;
3506
       srcmode = GET_MODE_WIDER_MODE (srcmode))
3507
    {
3508
      enum insn_code ic;
3509
      rtx trunc_y, last_insn;
3510
 
3511
      /* Skip if the target can't extend this way.  */
3512
      ic = can_extend_p (dstmode, srcmode, 0);
3513
      if (ic == CODE_FOR_nothing)
3514
        continue;
3515
 
3516
      /* Skip if the narrowed value isn't exact.  */
3517
      if (! exact_real_truncate (srcmode, &r))
3518
        continue;
3519
 
3520
      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3521
 
3522
      if (LEGITIMATE_CONSTANT_P (trunc_y))
3523
        {
3524
          /* Skip if the target needs extra instructions to perform
3525
             the extension.  */
3526
          if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3527
            continue;
3528
          /* This is valid, but may not be cheaper than the original. */
3529
          newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3530
          if (oldcost < newcost)
3531
            continue;
3532
        }
3533
      else if (float_extend_from_mem[dstmode][srcmode])
3534
        {
3535
          trunc_y = force_const_mem (srcmode, trunc_y);
3536
          /* This is valid, but may not be cheaper than the original. */
3537
          newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3538
          if (oldcost < newcost)
3539
            continue;
3540
          trunc_y = validize_mem (trunc_y);
3541
        }
3542
      else
3543
        continue;
3544
 
3545
      /* For CSE's benefit, force the compressed constant pool entry
3546
         into a new pseudo.  This constant may be used in different modes,
3547
         and if not, combine will put things back together for us.  */
3548
      trunc_y = force_reg (srcmode, trunc_y);
3549
      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3550
      last_insn = get_last_insn ();
3551
 
3552
      if (REG_P (x))
3553
        set_unique_reg_note (last_insn, REG_EQUAL, y);
3554
 
3555
      return last_insn;
3556
    }
3557
 
3558
  return NULL_RTX;
3559
}
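/* Illustrative sketch (hypothetical): given

       rtx y = CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode);
       emit_move_insn (gen_reg_rtx (DFmode), y);

   the value 1.0 is exactly representable in SFmode, so if the target has
   an extendsfdf2 pattern and rtx_cost does not prefer the plain DFmode
   constant, the move is emitted as an SFmode constant (in a register or a
   constant-pool load) followed by a float_extend to DFmode.  */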
3560
 
3561
/* Pushing data onto the stack.  */
3562
 
3563
/* Push a block of length SIZE (perhaps variable)
3564
   and return an rtx to address the beginning of the block.
3565
   The value may be virtual_outgoing_args_rtx.
3566
 
3567
   EXTRA is the number of bytes of padding to push in addition to SIZE.
3568
   BELOW nonzero means this padding comes at low addresses;
3569
   otherwise, the padding comes at high addresses.  */
3570
 
3571
rtx
3572
push_block (rtx size, int extra, int below)
3573
{
3574
  rtx temp;
3575
 
3576
  size = convert_modes (Pmode, ptr_mode, size, 1);
3577
  if (CONSTANT_P (size))
3578
    anti_adjust_stack (plus_constant (size, extra));
3579
  else if (REG_P (size) && extra == 0)
3580
    anti_adjust_stack (size);
3581
  else
3582
    {
3583
      temp = copy_to_mode_reg (Pmode, size);
3584
      if (extra != 0)
3585
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3586
                             temp, 0, OPTAB_LIB_WIDEN);
3587
      anti_adjust_stack (temp);
3588
    }
3589
 
3590
#ifndef STACK_GROWS_DOWNWARD
3591
  if (0)
3592
#else
3593
  if (1)
3594
#endif
3595
    {
3596
      temp = virtual_outgoing_args_rtx;
3597
      if (extra != 0 && below)
3598
        temp = plus_constant (temp, extra);
3599
    }
3600
  else
3601
    {
3602
      if (CONST_INT_P (size))
3603
        temp = plus_constant (virtual_outgoing_args_rtx,
3604
                              -INTVAL (size) - (below ? 0 : extra));
3605
      else if (extra != 0 && !below)
3606
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3607
                             negate_rtx (Pmode, plus_constant (size, extra)));
3608
      else
3609
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3610
                             negate_rtx (Pmode, size));
3611
    }
3612
 
3613
  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3614
}
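/* Illustrative sketch (hypothetical): to reserve 32 bytes of outgoing
   argument space and address it as a block,

       rtx addr = push_block (GEN_INT (32), 0, 0);
       rtx blk  = gen_rtx_MEM (BLKmode, addr);

   which adjusts the stack by 32 bytes and, on a downward-growing stack,
   returns an address based on virtual_outgoing_args_rtx.  */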
3615
 
3616
#ifdef PUSH_ROUNDING
3617
 
3618
/* Emit single push insn.  */
3619
 
3620
static void
3621
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3622
{
3623
  rtx dest_addr;
3624
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3625
  rtx dest;
3626
  enum insn_code icode;
3627
  insn_operand_predicate_fn pred;
3628
 
3629
  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3630
  /* If there is push pattern, use it.  Otherwise try old way of throwing
3631
     MEM representing push operation to move expander.  */
3632
  icode = optab_handler (push_optab, mode)->insn_code;
3633
  if (icode != CODE_FOR_nothing)
3634
    {
3635
      if (((pred = insn_data[(int) icode].operand[0].predicate)
3636
           && !((*pred) (x, mode))))
3637
        x = force_reg (mode, x);
3638
      emit_insn (GEN_FCN (icode) (x));
3639
      return;
3640
    }
3641
  if (GET_MODE_SIZE (mode) == rounded_size)
3642
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3643
  /* If we are to pad downward, adjust the stack pointer first and
3644
     then store X into the stack location using an offset.  This is
3645
     because emit_move_insn does not know how to pad; it does not have
3646
     access to type.  */
3647
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3648
    {
3649
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3650
      HOST_WIDE_INT offset;
3651
 
3652
      emit_move_insn (stack_pointer_rtx,
3653
                      expand_binop (Pmode,
3654
#ifdef STACK_GROWS_DOWNWARD
3655
                                    sub_optab,
3656
#else
3657
                                    add_optab,
3658
#endif
3659
                                    stack_pointer_rtx,
3660
                                    GEN_INT (rounded_size),
3661
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN));
3662
 
3663
      offset = (HOST_WIDE_INT) padding_size;
3664
#ifdef STACK_GROWS_DOWNWARD
3665
      if (STACK_PUSH_CODE == POST_DEC)
3666
        /* We have already decremented the stack pointer, so get the
3667
           previous value.  */
3668
        offset += (HOST_WIDE_INT) rounded_size;
3669
#else
3670
      if (STACK_PUSH_CODE == POST_INC)
3671
        /* We have already incremented the stack pointer, so get the
3672
           previous value.  */
3673
        offset -= (HOST_WIDE_INT) rounded_size;
3674
#endif
3675
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3676
    }
3677
  else
3678
    {
3679
#ifdef STACK_GROWS_DOWNWARD
3680
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
3681
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3682
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
3683
#else
3684
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
3685
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3686
                                GEN_INT (rounded_size));
3687
#endif
3688
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3689
    }
3690
 
3691
  dest = gen_rtx_MEM (mode, dest_addr);
3692
 
3693
  if (type != 0)
3694
    {
3695
      set_mem_attributes (dest, type, 1);
3696
 
3697
      if (flag_optimize_sibling_calls)
3698
        /* Function incoming arguments may overlap with sibling call
3699
           outgoing arguments and we cannot allow reordering of reads
3700
           from function arguments with stores to outgoing arguments
3701
           of sibling calls.  */
3702
        set_mem_alias_set (dest, 0);
3703
    }
3704
  emit_move_insn (dest, x);
3705
}
3706
#endif
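/* Illustrative sketch (hypothetical, STACK_GROWS_DOWNWARD target where
   PUSH_ROUNDING is the identity and STACK_PUSH_CODE is PRE_DEC):

       emit_single_push_insn (SImode, gen_int_mode (1, SImode), NULL_TREE);

   emits the equivalent of (set (mem:SI (pre_dec (reg sp))) (const_int 1))
   and bumps stack_pointer_delta by the rounded size; if the target defines
   a push pattern in push_optab, that pattern is used instead.  */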
3707
 
3708
/* Generate code to push X onto the stack, assuming it has mode MODE and
3709
   type TYPE.
3710
   MODE is redundant except when X is a CONST_INT (since they don't
3711
   carry mode info).
3712
   SIZE is an rtx for the size of data to be copied (in bytes),
3713
   needed only if X is BLKmode.
3714
 
3715
   ALIGN (in bits) is maximum alignment we can assume.
3716
 
3717
   If PARTIAL and REG are both nonzero, then copy that many of the first
3718
   bytes of X into registers starting with REG, and push the rest of X.
3719
   The amount of space pushed is decreased by PARTIAL bytes.
3720
   REG must be a hard register in this case.
3721
   If REG is zero but PARTIAL is not, take all other actions for an
3722
   argument partially in registers, but do not actually load any
3723
   registers.
3724
 
3725
   EXTRA is the amount in bytes of extra space to leave next to this arg.
3726
   This is ignored if an argument block has already been allocated.
3727
 
3728
   On a machine that lacks real push insns, ARGS_ADDR is the address of
3729
   the bottom of the argument block for this call.  We use indexing off there
3730
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3731
   argument block has not been preallocated.
3732
 
3733
   ARGS_SO_FAR is the size of args previously pushed for this call.
3734
 
3735
   REG_PARM_STACK_SPACE is nonzero if functions require stack space
3736
   for arguments passed in registers.  If nonzero, it will be the number
3737
   of bytes required.  */
3738
 
3739
void
3740
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3741
                unsigned int align, int partial, rtx reg, int extra,
3742
                rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3743
                rtx alignment_pad)
3744
{
3745
  rtx xinner;
3746
  enum direction stack_direction
3747
#ifdef STACK_GROWS_DOWNWARD
3748
    = downward;
3749
#else
3750
    = upward;
3751
#endif
3752
 
3753
  /* Decide where to pad the argument: `downward' for below,
3754
     `upward' for above, or `none' for don't pad it.
3755
     Default is below for small data on big-endian machines; else above.  */
3756
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3757
 
3758
  /* Invert direction if stack is post-decrement.
3759
     FIXME: why?  */
3760
  if (STACK_PUSH_CODE == POST_DEC)
3761
    if (where_pad != none)
3762
      where_pad = (where_pad == downward ? upward : downward);
3763
 
3764
  xinner = x;
3765
 
3766
  if (mode == BLKmode
3767
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3768
    {
3769
      /* Copy a block into the stack, entirely or partially.  */
3770
 
3771
      rtx temp;
3772
      int used;
3773
      int offset;
3774
      int skip;
3775
 
3776
      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3777
      used = partial - offset;
3778
 
3779
      if (mode != BLKmode)
3780
        {
3781
          /* A value is to be stored in an insufficiently aligned
3782
             stack slot; copy via a suitably aligned slot if
3783
             necessary.  */
3784
          size = GEN_INT (GET_MODE_SIZE (mode));
3785
          if (!MEM_P (xinner))
3786
            {
3787
              temp = assign_temp (type, 0, 1, 1);
3788
              emit_move_insn (temp, xinner);
3789
              xinner = temp;
3790
            }
3791
        }
3792
 
3793
      gcc_assert (size);
3794
 
3795
      /* USED is now the # of bytes we need not copy to the stack
3796
         because registers will take care of them.  */
3797
 
3798
      if (partial != 0)
3799
        xinner = adjust_address (xinner, BLKmode, used);
3800
 
3801
      /* If the partial register-part of the arg counts in its stack size,
3802
         skip the part of stack space corresponding to the registers.
3803
         Otherwise, start copying to the beginning of the stack space,
3804
         by setting SKIP to 0.  */
3805
      skip = (reg_parm_stack_space == 0) ? 0 : used;
3806
 
3807
#ifdef PUSH_ROUNDING
3808
      /* Do it with several push insns if that doesn't take lots of insns
3809
         and if there is no difficulty with push insns that skip bytes
3810
         on the stack for alignment purposes.  */
3811
      if (args_addr == 0
3812
          && PUSH_ARGS
3813
          && CONST_INT_P (size)
3814
          && skip == 0
3815
          && MEM_ALIGN (xinner) >= align
3816
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3817
          /* Here we avoid the case of a structure whose weak alignment
3818
             forces many pushes of a small amount of data,
3819
             and such small pushes do rounding that causes trouble.  */
3820
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3821
              || align >= BIGGEST_ALIGNMENT
3822
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3823
                  == (align / BITS_PER_UNIT)))
3824
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3825
        {
3826
          /* Push padding now if padding above and stack grows down,
3827
             or if padding below and stack grows up.
3828
             But if space already allocated, this has already been done.  */
3829
          if (extra && args_addr == 0
3830
              && where_pad != none && where_pad != stack_direction)
3831
            anti_adjust_stack (GEN_INT (extra));
3832
 
3833
          move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3834
        }
3835
      else
3836
#endif /* PUSH_ROUNDING  */
3837
        {
3838
          rtx target;
3839
 
3840
          /* Otherwise make space on the stack and copy the data
3841
             to the address of that space.  */
3842
 
3843
          /* Deduct words put into registers from the size we must copy.  */
3844
          if (partial != 0)
3845
            {
3846
              if (CONST_INT_P (size))
3847
                size = GEN_INT (INTVAL (size) - used);
3848
              else
3849
                size = expand_binop (GET_MODE (size), sub_optab, size,
3850
                                     GEN_INT (used), NULL_RTX, 0,
3851
                                     OPTAB_LIB_WIDEN);
3852
            }
3853
 
3854
          /* Get the address of the stack space.
3855
             In this case, we do not deal with EXTRA separately.
3856
             A single stack adjust will do.  */
3857
          if (! args_addr)
3858
            {
3859
              temp = push_block (size, extra, where_pad == downward);
3860
              extra = 0;
3861
            }
3862
          else if (CONST_INT_P (args_so_far))
3863
            temp = memory_address (BLKmode,
3864
                                   plus_constant (args_addr,
3865
                                                  skip + INTVAL (args_so_far)));
3866
          else
3867
            temp = memory_address (BLKmode,
3868
                                   plus_constant (gen_rtx_PLUS (Pmode,
3869
                                                                args_addr,
3870
                                                                args_so_far),
3871
                                                  skip));
3872
 
3873
          if (!ACCUMULATE_OUTGOING_ARGS)
3874
            {
3875
              /* If the source is referenced relative to the stack pointer,
3876
                 copy it to another register to stabilize it.  We do not need
3877
                 to do this if we know that we won't be changing sp.  */
3878
 
3879
              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3880
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3881
                temp = copy_to_reg (temp);
3882
            }
3883
 
3884
          target = gen_rtx_MEM (BLKmode, temp);
3885
 
3886
          /* We do *not* set_mem_attributes here, because incoming arguments
3887
             may overlap with sibling call outgoing arguments and we cannot
3888
             allow reordering of reads from function arguments with stores
3889
             to outgoing arguments of sibling calls.  We do, however, want
3890
             to record the alignment of the stack slot.  */
3891
          /* ALIGN may well be better aligned than TYPE, e.g. due to
3892
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
3893
          set_mem_align (target, align);
3894
 
3895
          emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3896
        }
3897
    }
3898
  else if (partial > 0)
3899
    {
3900
      /* Scalar partly in registers.  */
3901
 
3902
      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3903
      int i;
3904
      int not_stack;
3905
      /* # bytes of start of argument
3906
         that we must make space for but need not store.  */
3907
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3908
      int args_offset = INTVAL (args_so_far);
3909
      int skip;
3910
 
3911
      /* Push padding now if padding above and stack grows down,
3912
         or if padding below and stack grows up.
3913
         But if space already allocated, this has already been done.  */
3914
      if (extra && args_addr == 0
3915
          && where_pad != none && where_pad != stack_direction)
3916
        anti_adjust_stack (GEN_INT (extra));
3917
 
3918
      /* If we make space by pushing it, we might as well push
3919
         the real data.  Otherwise, we can leave OFFSET nonzero
3920
         and leave the space uninitialized.  */
3921
      if (args_addr == 0)
3922
        offset = 0;
3923
 
3924
      /* Now NOT_STACK gets the number of words that we don't need to
3925
         allocate on the stack.  Convert OFFSET to words too.  */
3926
      not_stack = (partial - offset) / UNITS_PER_WORD;
3927
      offset /= UNITS_PER_WORD;
3928
 
3929
      /* If the partial register-part of the arg counts in its stack size,
3930
         skip the part of stack space corresponding to the registers.
3931
         Otherwise, start copying to the beginning of the stack space,
3932
         by setting SKIP to 0.  */
3933
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3934
 
3935
      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3936
        x = validize_mem (force_const_mem (mode, x));
3937
 
3938
      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3939
         SUBREGs of such registers are not allowed.  */
3940
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3941
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3942
        x = copy_to_reg (x);
3943
 
3944
      /* Loop over all the words allocated on the stack for this arg.  */
3945
      /* We can do it by words, because any scalar bigger than a word
3946
         has a size that is a multiple of a word.  */
3947
#ifndef PUSH_ARGS_REVERSED
3948
      for (i = not_stack; i < size; i++)
3949
#else
3950
      for (i = size - 1; i >= not_stack; i--)
3951
#endif
3952
        if (i >= not_stack + offset)
3953
          emit_push_insn (operand_subword_force (x, i, mode),
3954
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3955
                          0, args_addr,
3956
                          GEN_INT (args_offset + ((i - not_stack + skip)
3957
                                                  * UNITS_PER_WORD)),
3958
                          reg_parm_stack_space, alignment_pad);
3959
    }
3960
  else
3961
    {
3962
      rtx addr;
3963
      rtx dest;
3964
 
3965
      /* Push padding now if padding above and stack grows down,
3966
         or if padding below and stack grows up.
3967
         But if space already allocated, this has already been done.  */
3968
      if (extra && args_addr == 0
3969
          && where_pad != none && where_pad != stack_direction)
3970
        anti_adjust_stack (GEN_INT (extra));
3971
 
3972
#ifdef PUSH_ROUNDING
3973
      if (args_addr == 0 && PUSH_ARGS)
3974
        emit_single_push_insn (mode, x, type);
3975
      else
3976
#endif
3977
        {
3978
          if (CONST_INT_P (args_so_far))
3979
            addr
3980
              = memory_address (mode,
3981
                                plus_constant (args_addr,
3982
                                               INTVAL (args_so_far)));
3983
          else
3984
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3985
                                                       args_so_far));
3986
          dest = gen_rtx_MEM (mode, addr);
3987
 
3988
          /* We do *not* set_mem_attributes here, because incoming arguments
3989
             may overlap with sibling call outgoing arguments and we cannot
3990
             allow reordering of reads from function arguments with stores
3991
             to outgoing arguments of sibling calls.  We do, however, want
3992
             to record the alignment of the stack slot.  */
3993
          /* ALIGN may well be better aligned than TYPE, e.g. due to
3994
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
3995
          set_mem_align (dest, align);
3996
 
3997
          emit_move_insn (dest, x);
3998
        }
3999
    }
4000
 
4001
  /* If part should go in registers, copy that part
4002
     into the appropriate registers.  Do this now, at the end,
4003
     since mem-to-mem copies above may do function calls.  */
4004
  if (partial > 0 && reg != 0)
4005
    {
4006
      /* Handle calls that pass values in multiple non-contiguous locations.
4007
         The Irix 6 ABI has examples of this.  */
4008
      if (GET_CODE (reg) == PARALLEL)
4009
        emit_group_load (reg, x, type, -1);
4010
      else
4011
        {
4012
          gcc_assert (partial % UNITS_PER_WORD == 0);
4013
          move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4014
        }
4015
    }
4016
 
4017
  if (extra && args_addr == 0 && where_pad == stack_direction)
4018
    anti_adjust_stack (GEN_INT (extra));
4019
 
4020
  if (alignment_pad && args_addr == 0)
4021
    anti_adjust_stack (alignment_pad);
4022
}
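/* Illustrative sketch (hypothetical): a plain word-sized argument pushed by
   the call expander on a target with push instructions and no preallocated
   argument block might look like

       emit_push_insn (val, SImode, integer_type_node, NULL_RTX,
                       PARM_BOUNDARY, 0, NULL_RTX, 0, NULL_RTX,
                       const0_rtx, 0, NULL_RTX);

   i.e. PARTIAL == 0, REG == 0 and ARGS_ADDR == 0, so the scalar branch at
   the end takes the emit_single_push_insn path.  VAL is a placeholder for
   the already expanded argument rtx.  */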
4023
 
4024
/* Return X if X can be used as a subtarget in a sequence of arithmetic
4025
   operations.  */
4026
 
4027
static rtx
4028
get_subtarget (rtx x)
4029
{
4030
  return (optimize
4031
          || x == 0
4032
           /* Only registers can be subtargets.  */
4033
           || !REG_P (x)
4034
           /* Don't use hard regs to avoid extending their life.  */
4035
           || REGNO (x) < FIRST_PSEUDO_REGISTER
4036
          ? 0 : x);
4037
}
4038
 
4039
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
4040
   FIELD is a bitfield.  Returns true if the optimization was successful,
4041
   and there's nothing else to do.  */
4042
 
4043
static bool
4044
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4045
                                 unsigned HOST_WIDE_INT bitpos,
4046
                                 enum machine_mode mode1, rtx str_rtx,
4047
                                 tree to, tree src)
4048
{
4049
  enum machine_mode str_mode = GET_MODE (str_rtx);
4050
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4051
  tree op0, op1;
4052
  rtx value, result;
4053
  optab binop;
4054
 
4055
  if (mode1 != VOIDmode
4056
      || bitsize >= BITS_PER_WORD
4057
      || str_bitsize > BITS_PER_WORD
4058
      || TREE_SIDE_EFFECTS (to)
4059
      || TREE_THIS_VOLATILE (to))
4060
    return false;
4061
 
4062
  STRIP_NOPS (src);
4063
  if (!BINARY_CLASS_P (src)
4064
      || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4065
    return false;
4066
 
4067
  op0 = TREE_OPERAND (src, 0);
4068
  op1 = TREE_OPERAND (src, 1);
4069
  STRIP_NOPS (op0);
4070
 
4071
  if (!operand_equal_p (to, op0, 0))
4072
    return false;
4073
 
4074
  if (MEM_P (str_rtx))
4075
    {
4076
      unsigned HOST_WIDE_INT offset1;
4077
 
4078
      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4079
        str_mode = word_mode;
4080
      str_mode = get_best_mode (bitsize, bitpos,
4081
                                MEM_ALIGN (str_rtx), str_mode, 0);
4082
      if (str_mode == VOIDmode)
4083
        return false;
4084
      str_bitsize = GET_MODE_BITSIZE (str_mode);
4085
 
4086
      offset1 = bitpos;
4087
      bitpos %= str_bitsize;
4088
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4089
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
4090
    }
4091
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4092
    return false;
4093
 
4094
  /* If the bit field covers the whole REG/MEM, store_field
4095
     will likely generate better code.  */
4096
  if (bitsize >= str_bitsize)
4097
    return false;
4098
 
4099
  /* We can't handle fields split across multiple entities.  */
4100
  if (bitpos + bitsize > str_bitsize)
4101
    return false;
4102
 
4103
  if (BYTES_BIG_ENDIAN)
4104
    bitpos = str_bitsize - bitpos - bitsize;
4105
 
4106
  switch (TREE_CODE (src))
4107
    {
4108
    case PLUS_EXPR:
4109
    case MINUS_EXPR:
4110
      /* For now, just optimize the case of the topmost bitfield
4111
         where we don't need to do any masking and also
4112
         1 bit bitfields where xor can be used.
4113
         We might win by one instruction for the other bitfields
4114
         too if insv/extv instructions aren't used, so that
4115
         can be added later.  */
4116
      if (bitpos + bitsize != str_bitsize
4117
          && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4118
        break;
4119
 
4120
      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4121
      value = convert_modes (str_mode,
4122
                             TYPE_MODE (TREE_TYPE (op1)), value,
4123
                             TYPE_UNSIGNED (TREE_TYPE (op1)));
4124
 
4125
      /* We may be accessing data outside the field, which means
4126
         we can alias adjacent data.  */
4127
      if (MEM_P (str_rtx))
4128
        {
4129
          str_rtx = shallow_copy_rtx (str_rtx);
4130
          set_mem_alias_set (str_rtx, 0);
4131
          set_mem_expr (str_rtx, 0);
4132
        }
4133
 
4134
      binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4135
      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4136
        {
4137
          value = expand_and (str_mode, value, const1_rtx, NULL);
4138
          binop = xor_optab;
4139
        }
4140
      value = expand_shift (LSHIFT_EXPR, str_mode, value,
4141
                            build_int_cst (NULL_TREE, bitpos),
4142
                            NULL_RTX, 1);
4143
      result = expand_binop (str_mode, binop, str_rtx,
4144
                             value, str_rtx, 1, OPTAB_WIDEN);
4145
      if (result != str_rtx)
4146
        emit_move_insn (str_rtx, result);
4147
      return true;
4148
 
4149
    case BIT_IOR_EXPR:
4150
    case BIT_XOR_EXPR:
4151
      if (TREE_CODE (op1) != INTEGER_CST)
4152
        break;
4153
      value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4154
      value = convert_modes (GET_MODE (str_rtx),
4155
                             TYPE_MODE (TREE_TYPE (op1)), value,
4156
                             TYPE_UNSIGNED (TREE_TYPE (op1)));
4157
 
4158
      /* We may be accessing data outside the field, which means
4159
         we can alias adjacent data.  */
4160
      if (MEM_P (str_rtx))
4161
        {
4162
          str_rtx = shallow_copy_rtx (str_rtx);
4163
          set_mem_alias_set (str_rtx, 0);
4164
          set_mem_expr (str_rtx, 0);
4165
        }
4166
 
4167
      binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4168
      if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4169
        {
4170
          rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4171
                              - 1);
4172
          value = expand_and (GET_MODE (str_rtx), value, mask,
4173
                              NULL_RTX);
4174
        }
4175
      value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4176
                            build_int_cst (NULL_TREE, bitpos),
4177
                            NULL_RTX, 1);
4178
      result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4179
                             value, str_rtx, 1, OPTAB_WIDEN);
4180
      if (result != str_rtx)
4181
        emit_move_insn (str_rtx, result);
4182
      return true;
4183
 
4184
    default:
4185
      break;
4186
    }
4187
 
4188
  return false;
4189
}
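/* Illustrative sketch (hypothetical): if a one-bit field update such as

       struct S { unsigned a : 1; } *p;
       p->a ^= 1;

   reaches expansion with SRC still in the form p->a ^ 1, the BIT_XOR_EXPR
   case above masks the constant to the field width, shifts it to BITPOS
   and emits a single read-modify-write XOR on the unit containing the
   field, instead of an extract_bit_field / store_bit_field sequence.  */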
4190
 
4191
 
4192
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
4193
   is true, try generating a nontemporal store.  */
4194
 
4195
void
4196
expand_assignment (tree to, tree from, bool nontemporal)
4197
{
4198
  rtx to_rtx = 0;
4199
  rtx result;
4200
 
4201
  /* Don't crash if the lhs of the assignment was erroneous.  */
4202
  if (TREE_CODE (to) == ERROR_MARK)
4203
    {
4204
      result = expand_normal (from);
4205
      return;
4206
    }
4207
 
4208
  /* Optimize away no-op moves without side-effects.  */
4209
  if (operand_equal_p (to, from, 0))
4210
    return;
4211
 
4212
  /* Assignment of a structure component needs special treatment
4213
     if the structure component's rtx is not simply a MEM.
4214
     Assignment of an array element at a constant index, and assignment of
4215
     an array element in an unaligned packed structure field, has the same
4216
     problem.  */
4217
  if (handled_component_p (to)
4218
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4219
    {
4220
      enum machine_mode mode1;
4221
      HOST_WIDE_INT bitsize, bitpos;
4222
      tree offset;
4223
      int unsignedp;
4224
      int volatilep = 0;
4225
      tree tem;
4226
 
4227
      push_temp_slots ();
4228
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4229
                                 &unsignedp, &volatilep, true);
4230
 
4231
      /* If we are going to use store_bit_field and extract_bit_field,
4232
         make sure to_rtx will be safe for multiple use.  */
4233
 
4234
      to_rtx = expand_normal (tem);
4235
 
4236
      if (offset != 0)
4237
        {
4238
          enum machine_mode address_mode;
4239
          rtx offset_rtx;
4240
 
4241
          if (!MEM_P (to_rtx))
4242
            {
4243
              /* We can get constant negative offsets into arrays with broken
4244
                 user code.  Translate this to a trap instead of ICEing.  */
4245
              gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4246
              expand_builtin_trap ();
4247
              to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4248
            }
4249
 
4250
          offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4251
          address_mode
4252
            = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4253
          if (GET_MODE (offset_rtx) != address_mode)
4254
            offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4255
 
4256
          /* A constant address in TO_RTX can have VOIDmode, we must not try
4257
             to call force_reg for that case.  Avoid that case.  */
4258
          if (MEM_P (to_rtx)
4259
              && GET_MODE (to_rtx) == BLKmode
4260
              && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4261
              && bitsize > 0
4262
              && (bitpos % bitsize) == 0
4263
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4264
              && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4265
            {
4266
              to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4267
              bitpos = 0;
4268
            }
4269
 
4270
          to_rtx = offset_address (to_rtx, offset_rtx,
4271
                                   highest_pow2_factor_for_target (to,
4272
                                                                   offset));
4273
        }
4274
 
4275
      /* No action is needed if the target is not a memory and the field
4276
         lies completely outside that target.  This can occur if the source
4277
         code contains an out-of-bounds access to a small array.  */
4278
      if (!MEM_P (to_rtx)
4279
          && GET_MODE (to_rtx) != BLKmode
4280
          && (unsigned HOST_WIDE_INT) bitpos
4281
             >= GET_MODE_BITSIZE (GET_MODE (to_rtx)))
4282
        {
4283
          expand_normal (from);
4284
          result = NULL;
4285
        }
4286
      /* Handle expand_expr of a complex value returning a CONCAT.  */
4287
      else if (GET_CODE (to_rtx) == CONCAT)
4288
        {
4289
          if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from))))
4290
            {
4291
              gcc_assert (bitpos == 0);
4292
              result = store_expr (from, to_rtx, false, nontemporal);
4293
            }
4294
          else
4295
            {
4296
              gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4297
              result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4298
                                   nontemporal);
4299
            }
4300
        }
4301
      else
4302
        {
4303
          if (MEM_P (to_rtx))
4304
            {
4305
              /* If the field is at offset zero, we could have been given the
4306
                 DECL_RTX of the parent struct.  Don't munge it.  */
4307
              to_rtx = shallow_copy_rtx (to_rtx);
4308
 
4309
              set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4310
 
4311
              /* Deal with volatile and readonly fields.  The former is only
4312
                 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
4313
              if (volatilep)
4314
                MEM_VOLATILE_P (to_rtx) = 1;
4315
              if (component_uses_parent_alias_set (to))
4316
                MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4317
            }
4318
 
4319
          if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4320
                                               to_rtx, to, from))
4321
            result = NULL;
4322
          else
4323
            result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4324
                                  TREE_TYPE (tem), get_alias_set (to),
4325
                                  nontemporal);
4326
        }
4327
 
4328
      if (result)
4329
        preserve_temp_slots (result);
4330
      free_temp_slots ();
4331
      pop_temp_slots ();
4332
      return;
4333
    }
4334
 
4335
   else if (TREE_CODE (to) == MISALIGNED_INDIRECT_REF)
4336
     {
4337
       addr_space_t as = ADDR_SPACE_GENERIC;
4338
       enum machine_mode mode, op_mode1;
4339
       enum insn_code icode;
4340
       rtx reg, addr, mem, insn;
4341
 
4342
       if (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (to, 0))))
4343
         as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
4344
 
4345
       reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4346
       reg = force_not_mem (reg);
4347
 
4348
       mode = TYPE_MODE (TREE_TYPE (to));
4349
       addr = expand_expr (TREE_OPERAND (to, 0), NULL_RTX, VOIDmode,
4350
                         EXPAND_SUM);
4351
       addr = memory_address_addr_space (mode, addr, as);
4352
       mem = gen_rtx_MEM (mode, addr);
4353
 
4354
       set_mem_attributes (mem, to, 0);
4355
       set_mem_addr_space (mem, as);
4356
 
4357
       icode = movmisalign_optab->handlers[mode].insn_code;
4358
       gcc_assert (icode != CODE_FOR_nothing);
4359
 
4360
       op_mode1 = insn_data[icode].operand[1].mode;
4361
       if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1)
4362
           && op_mode1 != VOIDmode)
4363
         reg = copy_to_mode_reg (op_mode1, reg);
4364
 
4365
       insn = GEN_FCN (icode) (mem, reg);
4366
       emit_insn (insn);
4367
       return;
4368
     }
4369
 
4370
  /* If the rhs is a function call and its value is not an aggregate,
4371
     call the function before we start to compute the lhs.
4372
     This is needed for correct code for cases such as
4373
     val = setjmp (buf) on machines where reference to val
4374
     requires loading up part of an address in a separate insn.
4375
 
4376
     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4377
     since it might be a promoted variable where the zero- or sign- extension
4378
     needs to be done.  Handling this in the normal way is safe because no
4379
     computation is done before the call.  The same is true for SSA names.  */
4380
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4381
      && COMPLETE_TYPE_P (TREE_TYPE (from))
4382
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4383
      && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4384
             && REG_P (DECL_RTL (to)))
4385
            || TREE_CODE (to) == SSA_NAME))
4386
    {
4387
      rtx value;
4388
 
4389
      push_temp_slots ();
4390
      value = expand_normal (from);
4391
      if (to_rtx == 0)
4392
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4393
 
4394
      /* Handle calls that return values in multiple non-contiguous locations.
4395
         The Irix 6 ABI has examples of this.  */
4396
      if (GET_CODE (to_rtx) == PARALLEL)
4397
        emit_group_load (to_rtx, value, TREE_TYPE (from),
4398
                         int_size_in_bytes (TREE_TYPE (from)));
4399
      else if (GET_MODE (to_rtx) == BLKmode)
4400
        emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4401
      else
4402
        {
4403
          if (POINTER_TYPE_P (TREE_TYPE (to)))
4404
            value = convert_memory_address_addr_space
4405
                      (GET_MODE (to_rtx), value,
4406
                       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4407
 
4408
          emit_move_insn (to_rtx, value);
4409
        }
4410
      preserve_temp_slots (to_rtx);
4411
      free_temp_slots ();
4412
      pop_temp_slots ();
4413
      return;
4414
    }
4415
 
4416
  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
4417
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
4418
 
4419
  if (to_rtx == 0)
4420
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4421
 
4422
  /* Don't move directly into a return register.  */
4423
  if (TREE_CODE (to) == RESULT_DECL
4424
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4425
    {
4426
      rtx temp;
4427
 
4428
      push_temp_slots ();
4429
      temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4430
 
4431
      if (GET_CODE (to_rtx) == PARALLEL)
4432
        emit_group_load (to_rtx, temp, TREE_TYPE (from),
4433
                         int_size_in_bytes (TREE_TYPE (from)));
4434
      else
4435
        emit_move_insn (to_rtx, temp);
4436
 
4437
      preserve_temp_slots (to_rtx);
4438
      free_temp_slots ();
4439
      pop_temp_slots ();
4440
      return;
4441
    }
4442
 
4443
  /* In case we are returning the contents of an object which overlaps
4444
     the place the value is being stored, use a safe function when copying
4445
     a value through a pointer into a structure value return block.  */
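  /* Added illustrative note (commentary, not part of the original source):
     a minimal example of the overlap handled here, where *P may point into
     the caller-provided return slot and the copy therefore goes through the
     memmove library call below rather than a plain block move:

        struct big { int a[16]; };

        struct big
        copy_through_pointer (struct big *p)
        {
          return *p;
        }

     Whether this path is taken also depends on the conditions checked
     below (generic address space, possible aliasing, returns_struct).  */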
4446
  if (TREE_CODE (to) == RESULT_DECL
4447
      && TREE_CODE (from) == INDIRECT_REF
4448
      && ADDR_SPACE_GENERIC_P
4449
           (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4450
      && refs_may_alias_p (to, from)
4451
      && cfun->returns_struct
4452
      && !cfun->returns_pcc_struct)
4453
    {
4454
      rtx from_rtx, size;
4455
 
4456
      push_temp_slots ();
4457
      size = expr_size (from);
4458
      from_rtx = expand_normal (from);
4459
 
4460
      emit_library_call (memmove_libfunc, LCT_NORMAL,
4461
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4462
                         XEXP (from_rtx, 0), Pmode,
4463
                         convert_to_mode (TYPE_MODE (sizetype),
4464
                                          size, TYPE_UNSIGNED (sizetype)),
4465
                         TYPE_MODE (sizetype));
4466
 
4467
      preserve_temp_slots (to_rtx);
4468
      free_temp_slots ();
4469
      pop_temp_slots ();
4470
      return;
4471
    }
4472
 
4473
  /* Compute FROM and store the value in the rtx we got.  */
4474
 
4475
  push_temp_slots ();
4476
  result = store_expr (from, to_rtx, 0, nontemporal);
4477
  preserve_temp_slots (result);
4478
  free_temp_slots ();
4479
  pop_temp_slots ();
4480
  return;
4481
}
4482
 
4483
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
4484
   succeeded, false otherwise.  */
4485
 
4486
bool
4487
emit_storent_insn (rtx to, rtx from)
4488
{
4489
  enum machine_mode mode = GET_MODE (to), imode;
4490
  enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4491
  rtx pattern;
4492
 
4493
  if (code == CODE_FOR_nothing)
4494
    return false;
4495
 
4496
  imode = insn_data[code].operand[0].mode;
4497
  if (!insn_data[code].operand[0].predicate (to, imode))
4498
    return false;
4499
 
4500
  imode = insn_data[code].operand[1].mode;
4501
  if (!insn_data[code].operand[1].predicate (from, imode))
4502
    {
4503
      from = copy_to_mode_reg (imode, from);
4504
      if (!insn_data[code].operand[1].predicate (from, imode))
4505
        return false;
4506
    }
4507
 
4508
  pattern = GEN_FCN (code) (to, from);
4509
  if (pattern == NULL_RTX)
4510
    return false;
4511
 
4512
  emit_insn (pattern);
4513
  return true;
4514
}
4515
 
4516
/* Generate code for computing expression EXP,
4517
   and storing the value into TARGET.
4518
 
4519
   If the mode is BLKmode then we may return TARGET itself.
4520
   It turns out that in BLKmode it doesn't cause a problem,
4521
   because C has no operators that could combine two different
4522
   assignments into the same BLKmode object with different values
4523
   with no sequence point.  Will other languages need this to
4524
   be more thorough?
4525
 
4526
   If CALL_PARAM_P is nonzero, this is a store into a call param on the
4527
   stack, and block moves may need to be treated specially.
4528
 
4529
   If NONTEMPORAL is true, try using a nontemporal store instruction.  */
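/* Added illustrative note (commentary, not part of the original source).
   A typical call, with CALL_PARAM_P zero and NONTEMPORAL false, is roughly:

     rtx r = store_expr (exp, target, 0, false);

   where EXP is the rhs tree and TARGET the destination rtx obtained from
   expanding the lhs.  */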
4530
 
4531
rtx
4532
store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4533
{
4534
  rtx temp;
4535
  rtx alt_rtl = NULL_RTX;
4536
  location_t loc = EXPR_LOCATION (exp);
4537
 
4538
  if (VOID_TYPE_P (TREE_TYPE (exp)))
4539
    {
4540
      /* C++ can generate ?: expressions with a throw expression in one
4541
         branch and an rvalue in the other. Here, we resolve attempts to
4542
         store the throw expression's nonexistent result.  */
4543
      gcc_assert (!call_param_p);
4544
      expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4545
      return NULL_RTX;
4546
    }
4547
  if (TREE_CODE (exp) == COMPOUND_EXPR)
4548
    {
4549
      /* Perform first part of compound expression, then assign from second
4550
         part.  */
4551
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4552
                   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4553
      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4554
                         nontemporal);
4555
    }
4556
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4557
    {
4558
      /* For a conditional expression, get a safe form of the target.  Then
4559
         test the condition, doing the appropriate assignment on either
4560
         side.  This avoids the creation of unnecessary temporaries.
4561
         For non-BLKmode, it is more efficient not to do this.  */
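      /* Added illustrative note (commentary, not part of the original
         source): the kind of source construct aimed at here is an
         aggregate assignment from a conditional expression, e.g.

            struct big { int a[16]; };
            struct big x, y, dst;
            int flag;

            void
            g (void)
            {
              dst = flag ? x : y;
            }

         assuming the front end still presents the rhs as a COND_EXPR at
         expansion time; storing into DST on each branch avoids a temporary
         for the selected aggregate.  */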
4562
 
4563
      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4564
 
4565
      do_pending_stack_adjust ();
4566
      NO_DEFER_POP;
4567
      jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
4568
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4569
                  nontemporal);
4570
      emit_jump_insn (gen_jump (lab2));
4571
      emit_barrier ();
4572
      emit_label (lab1);
4573
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4574
                  nontemporal);
4575
      emit_label (lab2);
4576
      OK_DEFER_POP;
4577
 
4578
      return NULL_RTX;
4579
    }
4580
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4581
    /* If this is a scalar in a register that is stored in a wider mode
4582
       than the declared mode, compute the result into its declared mode
4583
       and then convert to the wider mode.  Our value is the computed
4584
       expression.  */
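    /* Added illustrative note (commentary, not part of the original
       source): on a target that promotes narrow modes (PROMOTE_MODE), a
       local such as S below may live in a word-sized register even though
       its declared type is short:

          short
          narrow_add (short a, short b)
          {
            short s = a + b;
            return s;
          }

       As described above, the result is computed in the declared (narrow)
       mode and then sign- or zero-extended into the wider register.  */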
4585
    {
4586
      rtx inner_target = 0;
4587
 
4588
      /* We can do the conversion inside EXP, which will often result
4589
         in some optimizations.  Do the conversion in two steps: first
4590
         change the signedness, if needed, then the extend.  But don't
4591
         do this if the type of EXP is a subtype of something else
4592
         since then the conversion might involve more than just
4593
         converting modes.  */
4594
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4595
          && TREE_TYPE (TREE_TYPE (exp)) == 0
4596
          && GET_MODE_PRECISION (GET_MODE (target))
4597
             == TYPE_PRECISION (TREE_TYPE (exp)))
4598
        {
4599
          if (TYPE_UNSIGNED (TREE_TYPE (exp))
4600
              != SUBREG_PROMOTED_UNSIGNED_P (target))
4601
            {
4602
              /* Some types, e.g. Fortran's logical*4, won't have a signed
4603
                 version, so use the mode instead.  */
4604
              tree ntype
4605
                = (signed_or_unsigned_type_for
4606
                   (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4607
              if (ntype == NULL)
4608
                ntype = lang_hooks.types.type_for_mode
4609
                  (TYPE_MODE (TREE_TYPE (exp)),
4610
                   SUBREG_PROMOTED_UNSIGNED_P (target));
4611
 
4612
              exp = fold_convert_loc (loc, ntype, exp);
4613
            }
4614
 
4615
          exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
4616
                                  (GET_MODE (SUBREG_REG (target)),
4617
                                   SUBREG_PROMOTED_UNSIGNED_P (target)),
4618
                                  exp);
4619
 
4620
          inner_target = SUBREG_REG (target);
4621
        }
4622
 
4623
      temp = expand_expr (exp, inner_target, VOIDmode,
4624
                          call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4625
 
4626
      /* If TEMP is a VOIDmode constant, use convert_modes to make
4627
         sure that we properly convert it.  */
4628
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4629
        {
4630
          temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4631
                                temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4632
          temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4633
                                GET_MODE (target), temp,
4634
                                SUBREG_PROMOTED_UNSIGNED_P (target));
4635
        }
4636
 
4637
      convert_move (SUBREG_REG (target), temp,
4638
                    SUBREG_PROMOTED_UNSIGNED_P (target));
4639
 
4640
      return NULL_RTX;
4641
    }
4642
  else if (TREE_CODE (exp) == STRING_CST
4643
           && !nontemporal && !call_param_p
4644
           && TREE_STRING_LENGTH (exp) > 0
4645
           && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4646
    {
4647
      /* Optimize initialization of an array with a STRING_CST.  */
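      /* Added illustrative note (commentary, not part of the original
         source): this path covers initializations such as

            char buf[32] = "abc";

         where the string bytes are stored by pieces and, when the array is
         longer than the string (as checked below), the remaining bytes are
         zeroed with clear_storage.  */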
4648
      HOST_WIDE_INT exp_len, str_copy_len;
4649
      rtx dest_mem;
4650
 
4651
      exp_len = int_expr_size (exp);
4652
      if (exp_len <= 0)
4653
        goto normal_expr;
4654
 
4655
      str_copy_len = strlen (TREE_STRING_POINTER (exp));
4656
      if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4657
        goto normal_expr;
4658
 
4659
      str_copy_len = TREE_STRING_LENGTH (exp);
4660
      if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4661
        {
4662
          str_copy_len += STORE_MAX_PIECES - 1;
4663
          str_copy_len &= ~(STORE_MAX_PIECES - 1);
4664
        }
4665
      str_copy_len = MIN (str_copy_len, exp_len);
4666
      if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4667
                                CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4668
                                MEM_ALIGN (target), false))
4669
        goto normal_expr;
4670
 
4671
      dest_mem = target;
4672
 
4673
      dest_mem = store_by_pieces (dest_mem,
4674
                                  str_copy_len, builtin_strncpy_read_str,
4675
                                  CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4676
                                  MEM_ALIGN (target), false,
4677
                                  exp_len > str_copy_len ? 1 : 0);
4678
      if (exp_len > str_copy_len)
4679
        clear_storage (adjust_address (dest_mem, BLKmode, 0),
4680
                       GEN_INT (exp_len - str_copy_len),
4681
                       BLOCK_OP_NORMAL);
4682
      return NULL_RTX;
4683
    }
4684
  else
4685
    {
4686
      rtx tmp_target;
4687
 
4688
  normal_expr:
4689
      /* If we want to use a nontemporal store, force the value to
4690
         register first.  */
4691
      tmp_target = nontemporal ? NULL_RTX : target;
4692
      temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4693
                               (call_param_p
4694
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4695
                               &alt_rtl);
4696
    }
4697
 
4698
  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4699
     the same as that of TARGET, adjust the constant.  This is needed, for
4700
     example, in case it is a CONST_DOUBLE and we want only a word-sized
4701
     value.  */
4702
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4703
      && TREE_CODE (exp) != ERROR_MARK
4704
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4705
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4706
                          temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4707
 
4708
  /* If value was not generated in the target, store it there.
4709
     Convert the value to TARGET's type first if necessary and emit the
4710
     pending incrementations that have been queued when expanding EXP.
4711
     Note that we cannot emit the whole queue blindly because this will
4712
     effectively disable the POST_INC optimization later.
4713
 
4714
     If TEMP and TARGET compare equal according to rtx_equal_p, but
4715
     one or both of them are volatile memory refs, we have to distinguish
4716
     two cases:
4717
     - expand_expr has used TARGET.  In this case, we must not generate
4718
       another copy.  This can be detected by TARGET being equal according
4719
       to == .
4720
     - expand_expr has not used TARGET - that means that the source just
4721
       happens to have the same RTX form.  Since temp will have been created
4722
       by expand_expr, it will compare unequal according to == .
4723
       We must generate a copy in this case, to reach the correct number
4724
       of volatile memory references.  */
4725
 
4726
  if ((! rtx_equal_p (temp, target)
4727
       || (temp != target && (side_effects_p (temp)
4728
                              || side_effects_p (target))))
4729
      && TREE_CODE (exp) != ERROR_MARK
4730
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4731
         but TARGET is not a valid memory reference, TEMP will differ
4732
         from TARGET although it is really the same location.  */
4733
      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4734
      /* If there's nothing to copy, don't bother.  Don't call
4735
         expr_size unless necessary, because for some front ends (C++)
4736
         the expr_size hook must not be given objects that are not
4737
         supposed to be bit-copied or bit-initialized.  */
4738
      && expr_size (exp) != const0_rtx)
4739
    {
4740
      if (GET_MODE (temp) != GET_MODE (target)
4741
          && GET_MODE (temp) != VOIDmode)
4742
        {
4743
          int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4744
          if (GET_MODE (target) == BLKmode
4745
                   || GET_MODE (temp) == BLKmode)
4746
            emit_block_move (target, temp, expr_size (exp),
4747
                             (call_param_p
4748
                              ? BLOCK_OP_CALL_PARM
4749
                              : BLOCK_OP_NORMAL));
4750
          else
4751
            convert_move (target, temp, unsignedp);
4752
        }
4753
 
4754
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4755
        {
4756
          /* Handle copying a string constant into an array.  The string
4757
             constant may be shorter than the array.  So copy just the string's
4758
             actual length, and clear the rest.  First get the size of the data
4759
             type of the string, which is actually the size of the target.  */
4760
          rtx size = expr_size (exp);
4761
 
4762
          if (CONST_INT_P (size)
4763
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
4764
            emit_block_move (target, temp, size,
4765
                             (call_param_p
4766
                              ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4767
          else
4768
            {
4769
              enum machine_mode pointer_mode
4770
                = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
4771
              enum machine_mode address_mode
4772
                = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
4773
 
4774
              /* Compute the size of the data to copy from the string.  */
4775
              tree copy_size
4776
                = size_binop_loc (loc, MIN_EXPR,
4777
                                  make_tree (sizetype, size),
4778
                                  size_int (TREE_STRING_LENGTH (exp)));
4779
              rtx copy_size_rtx
4780
                = expand_expr (copy_size, NULL_RTX, VOIDmode,
4781
                               (call_param_p
4782
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4783
              rtx label = 0;
4784
 
4785
              /* Copy that much.  */
4786
              copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
4787
                                               TYPE_UNSIGNED (sizetype));
4788
              emit_block_move (target, temp, copy_size_rtx,
4789
                               (call_param_p
4790
                                ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4791
 
4792
              /* Figure out how much is left in TARGET that we have to clear.
4793
                 Do all calculations in pointer_mode.  */
4794
              if (CONST_INT_P (copy_size_rtx))
4795
                {
4796
                  size = plus_constant (size, -INTVAL (copy_size_rtx));
4797
                  target = adjust_address (target, BLKmode,
4798
                                           INTVAL (copy_size_rtx));
4799
                }
4800
              else
4801
                {
4802
                  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4803
                                       copy_size_rtx, NULL_RTX, 0,
4804
                                       OPTAB_LIB_WIDEN);
4805
 
4806
                  if (GET_MODE (copy_size_rtx) != address_mode)
4807
                    copy_size_rtx = convert_to_mode (address_mode,
4808
                                                     copy_size_rtx,
4809
                                                     TYPE_UNSIGNED (sizetype));
4810
 
4811
                  target = offset_address (target, copy_size_rtx,
4812
                                           highest_pow2_factor (copy_size));
4813
                  label = gen_label_rtx ();
4814
                  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4815
                                           GET_MODE (size), 0, label);
4816
                }
4817
 
4818
              if (size != const0_rtx)
4819
                clear_storage (target, size, BLOCK_OP_NORMAL);
4820
 
4821
              if (label)
4822
                emit_label (label);
4823
            }
4824
        }
4825
      /* Handle calls that return values in multiple non-contiguous locations.
4826
         The Irix 6 ABI has examples of this.  */
4827
      else if (GET_CODE (target) == PARALLEL)
4828
        emit_group_load (target, temp, TREE_TYPE (exp),
4829
                         int_size_in_bytes (TREE_TYPE (exp)));
4830
      else if (GET_MODE (temp) == BLKmode)
4831
        emit_block_move (target, temp, expr_size (exp),
4832
                         (call_param_p
4833
                          ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4834
      else if (nontemporal
4835
               && emit_storent_insn (target, temp))
4836
        /* If we managed to emit a nontemporal store, there is nothing else to
4837
           do.  */
4838
        ;
4839
      else
4840
        {
4841
          temp = force_operand (temp, target);
4842
          if (temp != target)
4843
            emit_move_insn (target, temp);
4844
        }
4845
    }
4846
 
4847
  return NULL_RTX;
4848
}
4849
 
4850
/* Helper for categorize_ctor_elements.  Identical interface.  */
4851
 
4852
static bool
4853
categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4854
                            HOST_WIDE_INT *p_elt_count,
4855
                            bool *p_must_clear)
4856
{
4857
  unsigned HOST_WIDE_INT idx;
4858
  HOST_WIDE_INT nz_elts, elt_count;
4859
  tree value, purpose;
4860
 
4861
  /* Whether CTOR is a valid constant initializer, in accordance with what
4862
     initializer_constant_valid_p does.  If inferred from the constructor
4863
     elements, true until proven otherwise.  */
4864
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4865
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4866
 
4867
  nz_elts = 0;
4868
  elt_count = 0;
4869
 
4870
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4871
    {
4872
      HOST_WIDE_INT mult;
4873
 
4874
      mult = 1;
4875
      if (TREE_CODE (purpose) == RANGE_EXPR)
4876
        {
4877
          tree lo_index = TREE_OPERAND (purpose, 0);
4878
          tree hi_index = TREE_OPERAND (purpose, 1);
4879
 
4880
          if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4881
            mult = (tree_low_cst (hi_index, 1)
4882
                    - tree_low_cst (lo_index, 1) + 1);
4883
        }
4884
 
4885
      switch (TREE_CODE (value))
4886
        {
4887
        case CONSTRUCTOR:
4888
          {
4889
            HOST_WIDE_INT nz = 0, ic = 0;
4890
 
4891
            bool const_elt_p
4892
              = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4893
 
4894
            nz_elts += mult * nz;
4895
            elt_count += mult * ic;
4896
 
4897
            if (const_from_elts_p && const_p)
4898
              const_p = const_elt_p;
4899
          }
4900
          break;
4901
 
4902
        case INTEGER_CST:
4903
        case REAL_CST:
4904
        case FIXED_CST:
4905
          if (!initializer_zerop (value))
4906
            nz_elts += mult;
4907
          elt_count += mult;
4908
          break;
4909
 
4910
        case STRING_CST:
4911
          nz_elts += mult * TREE_STRING_LENGTH (value);
4912
          elt_count += mult * TREE_STRING_LENGTH (value);
4913
          break;
4914
 
4915
        case COMPLEX_CST:
4916
          if (!initializer_zerop (TREE_REALPART (value)))
4917
            nz_elts += mult;
4918
          if (!initializer_zerop (TREE_IMAGPART (value)))
4919
            nz_elts += mult;
4920
          elt_count += mult;
4921
          break;
4922
 
4923
        case VECTOR_CST:
4924
          {
4925
            tree v;
4926
            for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4927
              {
4928
                if (!initializer_zerop (TREE_VALUE (v)))
4929
                  nz_elts += mult;
4930
                elt_count += mult;
4931
              }
4932
          }
4933
          break;
4934
 
4935
        default:
4936
          nz_elts += mult;
4937
          elt_count += mult;
4938
 
4939
          if (const_from_elts_p && const_p)
4940
            const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4941
                      != NULL_TREE;
4942
          break;
4943
        }
4944
    }
4945
 
4946
  if (!*p_must_clear
4947
      && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4948
          || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4949
    {
4950
      tree init_sub_type;
4951
      bool clear_this = true;
4952
 
4953
      if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4954
        {
4955
          /* We don't expect more than one element of the union to be
4956
             initialized.  Not sure what we should do otherwise... */
4957
          gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4958
                      == 1);
4959
 
4960
          init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4961
                                                CONSTRUCTOR_ELTS (ctor),
4962
                                                0)->value);
4963
 
4964
          /* ??? We could look at each element of the union, and find the
4965
             largest element.  Which would avoid comparing the size of the
4966
             initialized element against any tail padding in the union.
4967
             Doesn't seem worth the effort...  */
4968
          if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4969
                                TYPE_SIZE (init_sub_type)) == 1)
4970
            {
4971
              /* And now we have to find out if the element itself is fully
4972
                 constructed.  E.g. for union { struct { int a, b; } s; } u
4973
                 = { .s = { .a = 1 } }.  */
4974
              if (elt_count == count_type_elements (init_sub_type, false))
4975
                clear_this = false;
4976
            }
4977
        }
4978
 
4979
      *p_must_clear = clear_this;
4980
    }
4981
 
4982
  *p_nz_elts += nz_elts;
4983
  *p_elt_count += elt_count;
4984
 
4985
  return const_p;
4986
}
4987
 
4988
/* Examine CTOR to discover:
4989
   * how many scalar fields are set to nonzero values,
4990
     and place that count in *P_NZ_ELTS;
4991
   * how many scalar fields in total are in CTOR,
4992
     and place that count in *P_ELT_COUNT;
4993
   * if a type is a union, and the initializer from the constructor
4994
     is not the largest element in the union, then set *P_MUST_CLEAR.
4995
 
4996
   Return whether or not CTOR is a valid static constant initializer, the same
4997
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
4998
 
4999
bool
5000
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5001
                          HOST_WIDE_INT *p_elt_count,
5002
                          bool *p_must_clear)
5003
{
5004
  *p_nz_elts = 0;
5005
  *p_elt_count = 0;
5006
  *p_must_clear = false;
5007
 
5008
  return
5009
    categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
5010
}
5011
 
5012
/* Count the number of scalars in TYPE.  Return -1 on overflow or if
5013
   TYPE is variable-sized.  If ALLOW_FLEXARR is true, don't count a flexible
5014
   array member at the end of the structure.  */
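/* Added illustrative note (commentary, not part of the original source),
   sketching what the cases below yield for a few common types:

     int a[20];                         20 scalars
     struct s { int x; int y[3]; };      4 scalars
     struct t { int n; int data[]; };    1 scalar if ALLOW_FLEXARR,
                                         otherwise -1
     int v[n];  (variable length)       -1  */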
5015
 
5016
HOST_WIDE_INT
5017
count_type_elements (const_tree type, bool allow_flexarr)
5018
{
5019
  const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
5020
  switch (TREE_CODE (type))
5021
    {
5022
    case ARRAY_TYPE:
5023
      {
5024
        tree telts = array_type_nelts (type);
5025
        if (telts && host_integerp (telts, 1))
5026
          {
5027
            HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
5028
            HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
5029
            if (n == 0)
5030
              return 0;
5031
            else if (max / n > m)
5032
              return n * m;
5033
          }
5034
        return -1;
5035
      }
5036
 
5037
    case RECORD_TYPE:
5038
      {
5039
        HOST_WIDE_INT n = 0, t;
5040
        tree f;
5041
 
5042
        for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5043
          if (TREE_CODE (f) == FIELD_DECL)
5044
            {
5045
              t = count_type_elements (TREE_TYPE (f), false);
5046
              if (t < 0)
5047
                {
5048
                  /* Check for structures with flexible array member.  */
5049
                  tree tf = TREE_TYPE (f);
5050
                  if (allow_flexarr
5051
                      && TREE_CHAIN (f) == NULL
5052
                      && TREE_CODE (tf) == ARRAY_TYPE
5053
                      && TYPE_DOMAIN (tf)
5054
                      && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5055
                      && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5056
                      && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5057
                      && int_size_in_bytes (type) >= 0)
5058
                    break;
5059
 
5060
                  return -1;
5061
                }
5062
              n += t;
5063
            }
5064
 
5065
        return n;
5066
      }
5067
 
5068
    case UNION_TYPE:
5069
    case QUAL_UNION_TYPE:
5070
      return -1;
5071
 
5072
    case COMPLEX_TYPE:
5073
      return 2;
5074
 
5075
    case VECTOR_TYPE:
5076
      return TYPE_VECTOR_SUBPARTS (type);
5077
 
5078
    case INTEGER_TYPE:
5079
    case REAL_TYPE:
5080
    case FIXED_POINT_TYPE:
5081
    case ENUMERAL_TYPE:
5082
    case BOOLEAN_TYPE:
5083
    case POINTER_TYPE:
5084
    case OFFSET_TYPE:
5085
    case REFERENCE_TYPE:
5086
      return 1;
5087
 
5088
    case ERROR_MARK:
5089
      return 0;
5090
 
5091
    case VOID_TYPE:
5092
    case METHOD_TYPE:
5093
    case FUNCTION_TYPE:
5094
    case LANG_TYPE:
5095
    default:
5096
      gcc_unreachable ();
5097
    }
5098
}
5099
 
5100
/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
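/* Added illustrative note (commentary, not part of the original source):
   for an initializer such as

     int a[100] = { 1, 2, 3 };

   there are 3 nonzero scalars against 100 scalars in the type, so
   mostly_zeros_p returns 1 and store_constructor can clear the whole object
   first and then store only the nonzero elements.  */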
5101
 
5102
static int
5103
mostly_zeros_p (const_tree exp)
5104
{
5105
  if (TREE_CODE (exp) == CONSTRUCTOR)
5106
 
5107
    {
5108
      HOST_WIDE_INT nz_elts, count, elts;
5109
      bool must_clear;
5110
 
5111
      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5112
      if (must_clear)
5113
        return 1;
5114
 
5115
      elts = count_type_elements (TREE_TYPE (exp), false);
5116
 
5117
      return nz_elts < elts / 4;
5118
    }
5119
 
5120
  return initializer_zerop (exp);
5121
}
5122
 
5123
/* Return 1 if EXP contains all zeros.  */
5124
 
5125
static int
5126
all_zeros_p (const_tree exp)
5127
{
5128
  if (TREE_CODE (exp) == CONSTRUCTOR)
5129
 
5130
    {
5131
      HOST_WIDE_INT nz_elts, count;
5132
      bool must_clear;
5133
 
5134
      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5135
      return nz_elts == 0;
5136
    }
5137
 
5138
  return initializer_zerop (exp);
5139
}
5140
 
5141
/* Helper function for store_constructor.
5142
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5143
   TYPE is the type of the CONSTRUCTOR, not the element type.
5144
   CLEARED is as for store_constructor.
5145
   ALIAS_SET is the alias set to use for any stores.
5146
 
5147
   This provides a recursive shortcut back to store_constructor when it isn't
5148
   necessary to go through store_field.  This is so that we can pass through
5149
   the cleared field to let store_constructor know that we may not have to
5150
   clear a substructure if the outer structure has already been cleared.  */
5151
 
5152
static void
5153
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5154
                         HOST_WIDE_INT bitpos, enum machine_mode mode,
5155
                         tree exp, tree type, int cleared,
5156
                         alias_set_type alias_set)
5157
{
5158
  if (TREE_CODE (exp) == CONSTRUCTOR
5159
      /* We can only call store_constructor recursively if the size and
5160
         bit position are on a byte boundary.  */
5161
      && bitpos % BITS_PER_UNIT == 0
5162
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5163
      /* If we have a nonzero bitpos for a register target, then we just
5164
         let store_field do the bitfield handling.  This is unlikely to
5165
         generate unnecessary clear instructions anyway.  */
5166
      && (bitpos == 0 || MEM_P (target)))
5167
    {
5168
      if (MEM_P (target))
5169
        target
5170
          = adjust_address (target,
5171
                            GET_MODE (target) == BLKmode
5172
                            || 0 != (bitpos
5173
                                     % GET_MODE_ALIGNMENT (GET_MODE (target)))
5174
                            ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5175
 
5176
 
5177
      /* Update the alias set, if required.  */
5178
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5179
          && MEM_ALIAS_SET (target) != 0)
5180
        {
5181
          target = copy_rtx (target);
5182
          set_mem_alias_set (target, alias_set);
5183
        }
5184
 
5185
      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5186
    }
5187
  else
5188
    store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5189
}
5190
 
5191
/* Store the value of constructor EXP into the rtx TARGET.
5192
   TARGET is either a REG or a MEM; we know it cannot conflict, since
5193
   safe_from_p has been called.
5194
   CLEARED is true if TARGET is known to have been zeroed.
5195
   SIZE is the number of bytes of TARGET we are allowed to modify: this
5196
   may not be the same as the size of EXP if we are assigning to a field
5197
   which has been packed to exclude padding bits.  */
5198
 
5199
static void
5200
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5201
{
5202
  tree type = TREE_TYPE (exp);
5203
#ifdef WORD_REGISTER_OPERATIONS
5204
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5205
#endif
5206
 
5207
  switch (TREE_CODE (type))
5208
    {
5209
    case RECORD_TYPE:
5210
    case UNION_TYPE:
5211
    case QUAL_UNION_TYPE:
5212
      {
5213
        unsigned HOST_WIDE_INT idx;
5214
        tree field, value;
5215
 
5216
        /* If size is zero or the target is already cleared, do nothing.  */
5217
        if (size == 0 || cleared)
5218
          cleared = 1;
5219
        /* We either clear the aggregate or indicate the value is dead.  */
5220
        else if ((TREE_CODE (type) == UNION_TYPE
5221
                  || TREE_CODE (type) == QUAL_UNION_TYPE)
5222
                 && ! CONSTRUCTOR_ELTS (exp))
5223
          /* If the constructor is empty, clear the union.  */
5224
          {
5225
            clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5226
            cleared = 1;
5227
          }
5228
 
5229
        /* If we are building a static constructor into a register,
5230
           set the initial value as zero so we can fold the value into
5231
           a constant.  But if more than one register is involved,
5232
           this probably loses.  */
5233
        else if (REG_P (target) && TREE_STATIC (exp)
5234
                 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5235
          {
5236
            emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5237
            cleared = 1;
5238
          }
5239
 
5240
        /* If the constructor has fewer fields than the structure or
5241
           if we are initializing the structure to mostly zeros, clear
5242
           the whole structure first.  Don't do this if TARGET is a
5243
           register whose mode size isn't equal to SIZE since
5244
           clear_storage can't handle this case.  */
5245
        else if (size > 0
5246
                 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5247
                      != fields_length (type))
5248
                     || mostly_zeros_p (exp))
5249
                 && (!REG_P (target)
5250
                     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5251
                         == size)))
5252
          {
5253
            clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5254
            cleared = 1;
5255
          }
5256
 
5257
        if (REG_P (target) && !cleared)
5258
          emit_clobber (target);
5259
 
5260
        /* Store each element of the constructor into the
5261
           corresponding field of TARGET.  */
5262
        FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5263
          {
5264
            enum machine_mode mode;
5265
            HOST_WIDE_INT bitsize;
5266
            HOST_WIDE_INT bitpos = 0;
5267
            tree offset;
5268
            rtx to_rtx = target;
5269
 
5270
            /* Just ignore missing fields.  We cleared the whole
5271
               structure, above, if any fields are missing.  */
5272
            if (field == 0)
5273
              continue;
5274
 
5275
            if (cleared && initializer_zerop (value))
5276
              continue;
5277
 
5278
            if (host_integerp (DECL_SIZE (field), 1))
5279
              bitsize = tree_low_cst (DECL_SIZE (field), 1);
5280
            else
5281
              bitsize = -1;
5282
 
5283
            mode = DECL_MODE (field);
5284
            if (DECL_BIT_FIELD (field))
5285
              mode = VOIDmode;
5286
 
5287
            offset = DECL_FIELD_OFFSET (field);
5288
            if (host_integerp (offset, 0)
5289
                && host_integerp (bit_position (field), 0))
5290
              {
5291
                bitpos = int_bit_position (field);
5292
                offset = 0;
5293
              }
5294
            else
5295
              bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5296
 
5297
            if (offset)
5298
              {
5299
                enum machine_mode address_mode;
5300
                rtx offset_rtx;
5301
 
5302
                offset
5303
                  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5304
                                                    make_tree (TREE_TYPE (exp),
5305
                                                               target));
5306
 
5307
                offset_rtx = expand_normal (offset);
5308
                gcc_assert (MEM_P (to_rtx));
5309
 
5310
                address_mode
5311
                  = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5312
                if (GET_MODE (offset_rtx) != address_mode)
5313
                  offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5314
 
5315
                to_rtx = offset_address (to_rtx, offset_rtx,
5316
                                         highest_pow2_factor (offset));
5317
              }
5318
 
5319
#ifdef WORD_REGISTER_OPERATIONS
5320
            /* If this initializes a field that is smaller than a
5321
               word, at the start of a word, try to widen it to a full
5322
               word.  This special case allows us to output C++ member
5323
               function initializations in a form that the optimizers
5324
               can understand.  */
5325
            if (REG_P (target)
5326
                && bitsize < BITS_PER_WORD
5327
                && bitpos % BITS_PER_WORD == 0
5328
                && GET_MODE_CLASS (mode) == MODE_INT
5329
                && TREE_CODE (value) == INTEGER_CST
5330
                && exp_size >= 0
5331
                && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5332
              {
5333
                tree type = TREE_TYPE (value);
5334
 
5335
                if (TYPE_PRECISION (type) < BITS_PER_WORD)
5336
                  {
5337
                    type = lang_hooks.types.type_for_size
5338
                      (BITS_PER_WORD, TYPE_UNSIGNED (type));
5339
                    value = fold_convert (type, value);
5340
                  }
5341
 
5342
                if (BYTES_BIG_ENDIAN)
5343
                  value
5344
                   = fold_build2 (LSHIFT_EXPR, type, value,
5345
                                   build_int_cst (type,
5346
                                                  BITS_PER_WORD - bitsize));
5347
                bitsize = BITS_PER_WORD;
5348
                mode = word_mode;
5349
              }
5350
#endif
5351
 
5352
            if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5353
                && DECL_NONADDRESSABLE_P (field))
5354
              {
5355
                to_rtx = copy_rtx (to_rtx);
5356
                MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5357
              }
5358
 
5359
            store_constructor_field (to_rtx, bitsize, bitpos, mode,
5360
                                     value, type, cleared,
5361
                                     get_alias_set (TREE_TYPE (field)));
5362
          }
5363
        break;
5364
      }
5365
    case ARRAY_TYPE:
5366
      {
5367
        tree value, index;
5368
        unsigned HOST_WIDE_INT i;
5369
        int need_to_clear;
5370
        tree domain;
5371
        tree elttype = TREE_TYPE (type);
5372
        int const_bounds_p;
5373
        HOST_WIDE_INT minelt = 0;
5374
        HOST_WIDE_INT maxelt = 0;
5375
 
5376
        domain = TYPE_DOMAIN (type);
5377
        const_bounds_p = (TYPE_MIN_VALUE (domain)
5378
                          && TYPE_MAX_VALUE (domain)
5379
                          && host_integerp (TYPE_MIN_VALUE (domain), 0)
5380
                          && host_integerp (TYPE_MAX_VALUE (domain), 0));
5381
 
5382
        /* If we have constant bounds for the range of the type, get them.  */
5383
        if (const_bounds_p)
5384
          {
5385
            minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5386
            maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5387
          }
5388
 
5389
        /* If the constructor has fewer elements than the array, clear
5390
           the whole array first.  Similarly if this is a static
5391
           constructor of a non-BLKmode object.  */
5392
        if (cleared)
5393
          need_to_clear = 0;
5394
        else if (REG_P (target) && TREE_STATIC (exp))
5395
          need_to_clear = 1;
5396
        else
5397
          {
5398
            unsigned HOST_WIDE_INT idx;
5399
            tree index, value;
5400
            HOST_WIDE_INT count = 0, zero_count = 0;
5401
            need_to_clear = ! const_bounds_p;
5402
 
5403
            /* This loop is a more accurate version of the loop in
5404
               mostly_zeros_p (it handles RANGE_EXPR in an index).  It
5405
               is also needed to check for missing elements.  */
5406
            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5407
              {
5408
                HOST_WIDE_INT this_node_count;
5409
 
5410
                if (need_to_clear)
5411
                  break;
5412
 
5413
                if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5414
                  {
5415
                    tree lo_index = TREE_OPERAND (index, 0);
5416
                    tree hi_index = TREE_OPERAND (index, 1);
5417
 
5418
                    if (! host_integerp (lo_index, 1)
5419
                        || ! host_integerp (hi_index, 1))
5420
                      {
5421
                        need_to_clear = 1;
5422
                        break;
5423
                      }
5424
 
5425
                    this_node_count = (tree_low_cst (hi_index, 1)
5426
                                       - tree_low_cst (lo_index, 1) + 1);
5427
                  }
5428
                else
5429
                  this_node_count = 1;
5430
 
5431
                count += this_node_count;
5432
                if (mostly_zeros_p (value))
5433
                  zero_count += this_node_count;
5434
              }
5435
 
5436
            /* Clear the entire array first if there are any missing
5437
               elements, or if the incidence of zero elements is >=
5438
               75%.  */
5439
            if (! need_to_clear
5440
                && (count < maxelt - minelt + 1
5441
                    || 4 * zero_count >= 3 * count))
5442
              need_to_clear = 1;
5443
          }
5444
 
5445
        if (need_to_clear && size > 0)
5446
          {
5447
            if (REG_P (target))
5448
              emit_move_insn (target,  CONST0_RTX (GET_MODE (target)));
5449
            else
5450
              clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5451
            cleared = 1;
5452
          }
5453
 
5454
        if (!cleared && REG_P (target))
5455
          /* Inform later passes that the old value is dead.  */
5456
          emit_clobber (target);
5457
 
5458
        /* Store each element of the constructor into the
5459
           corresponding element of TARGET, determined by counting the
5460
           elements.  */
5461
        FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5462
          {
5463
            enum machine_mode mode;
5464
            HOST_WIDE_INT bitsize;
5465
            HOST_WIDE_INT bitpos;
5466
            rtx xtarget = target;
5467
 
5468
            if (cleared && initializer_zerop (value))
5469
              continue;
5470
 
5471
            mode = TYPE_MODE (elttype);
5472
            if (mode == BLKmode)
5473
              bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5474
                         ? tree_low_cst (TYPE_SIZE (elttype), 1)
5475
                         : -1);
5476
            else
5477
              bitsize = GET_MODE_BITSIZE (mode);
5478
 
5479
            if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5480
              {
5481
                tree lo_index = TREE_OPERAND (index, 0);
5482
                tree hi_index = TREE_OPERAND (index, 1);
5483
                rtx index_r, pos_rtx;
5484
                HOST_WIDE_INT lo, hi, count;
5485
                tree position;
5486
 
5487
                /* If the range is constant and "small", unroll the loop.  */
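                /* Added illustrative note (commentary, not part of the
                   original source): RANGE_EXPR indexes come from the GNU C
                   designated range initializer extension, e.g.

                      int a[8]     = { [0 ... 3]    = 7 };
                      int b[10000] = { [0 ... 9999] = 1 };

                   Assuming 32-bit int, the first range (16 bytes) passes
                   the size test below and is unrolled into individual
                   stores, while the second is emitted as a runtime loop.  */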
5488
                if (const_bounds_p
5489
                    && host_integerp (lo_index, 0)
5490
                    && host_integerp (hi_index, 0)
5491
                    && (lo = tree_low_cst (lo_index, 0),
5492
                        hi = tree_low_cst (hi_index, 0),
5493
                        count = hi - lo + 1,
5494
                        (!MEM_P (target)
5495
                         || count <= 2
5496
                         || (host_integerp (TYPE_SIZE (elttype), 1)
5497
                             && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5498
                                 <= 40 * 8)))))
5499
                  {
5500
                    lo -= minelt;  hi -= minelt;
5501
                    for (; lo <= hi; lo++)
5502
                      {
5503
                        bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5504
 
5505
                        if (MEM_P (target)
5506
                            && !MEM_KEEP_ALIAS_SET_P (target)
5507
                            && TREE_CODE (type) == ARRAY_TYPE
5508
                            && TYPE_NONALIASED_COMPONENT (type))
5509
                          {
5510
                            target = copy_rtx (target);
5511
                            MEM_KEEP_ALIAS_SET_P (target) = 1;
5512
                          }
5513
 
5514
                        store_constructor_field
5515
                          (target, bitsize, bitpos, mode, value, type, cleared,
5516
                           get_alias_set (elttype));
5517
                      }
5518
                  }
5519
                else
5520
                  {
5521
                    rtx loop_start = gen_label_rtx ();
5522
                    rtx loop_end = gen_label_rtx ();
5523
                    tree exit_cond;
5524
 
5525
                    expand_normal (hi_index);
5526
 
5527
                    index = build_decl (EXPR_LOCATION (exp),
5528
                                        VAR_DECL, NULL_TREE, domain);
5529
                    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
5530
                    SET_DECL_RTL (index, index_r);
5531
                    store_expr (lo_index, index_r, 0, false);
5532
 
5533
                    /* Build the head of the loop.  */
5534
                    do_pending_stack_adjust ();
5535
                    emit_label (loop_start);
5536
 
5537
                    /* Assign value to element index.  */
5538
                    position =
5539
                      fold_convert (ssizetype,
5540
                                    fold_build2 (MINUS_EXPR,
5541
                                                 TREE_TYPE (index),
5542
                                                 index,
5543
                                                 TYPE_MIN_VALUE (domain)));
5544
 
5545
                    position =
5546
                        size_binop (MULT_EXPR, position,
5547
                                    fold_convert (ssizetype,
5548
                                                  TYPE_SIZE_UNIT (elttype)));
5549
 
5550
                    pos_rtx = expand_normal (position);
5551
                    xtarget = offset_address (target, pos_rtx,
5552
                                              highest_pow2_factor (position));
5553
                    xtarget = adjust_address (xtarget, mode, 0);
5554
                    if (TREE_CODE (value) == CONSTRUCTOR)
5555
                      store_constructor (value, xtarget, cleared,
5556
                                         bitsize / BITS_PER_UNIT);
5557
                    else
5558
                      store_expr (value, xtarget, 0, false);
5559
 
5560
                    /* Generate a conditional jump to exit the loop.  */
5561
                    exit_cond = build2 (LT_EXPR, integer_type_node,
5562
                                        index, hi_index);
5563
                    jumpif (exit_cond, loop_end, -1);
5564
 
5565
                    /* Update the loop counter, and jump to the head of
5566
                       the loop.  */
5567
                    expand_assignment (index,
5568
                                       build2 (PLUS_EXPR, TREE_TYPE (index),
5569
                                               index, integer_one_node),
5570
                                       false);
5571
 
5572
                    emit_jump (loop_start);
5573
 
5574
                    /* Build the end of the loop.  */
5575
                    emit_label (loop_end);
5576
                  }
5577
              }
5578
            else if ((index != 0 && ! host_integerp (index, 0))
5579
                     || ! host_integerp (TYPE_SIZE (elttype), 1))
5580
              {
5581
                tree position;
5582
 
5583
                if (index == 0)
5584
                  index = ssize_int (1);
5585
 
5586
                if (minelt)
5587
                  index = fold_convert (ssizetype,
5588
                                        fold_build2 (MINUS_EXPR,
5589
                                                     TREE_TYPE (index),
5590
                                                     index,
5591
                                                     TYPE_MIN_VALUE (domain)));
5592
 
5593
                position =
5594
                  size_binop (MULT_EXPR, index,
5595
                              fold_convert (ssizetype,
5596
                                            TYPE_SIZE_UNIT (elttype)));
5597
                xtarget = offset_address (target,
5598
                                          expand_normal (position),
5599
                                          highest_pow2_factor (position));
5600
                xtarget = adjust_address (xtarget, mode, 0);
5601
                store_expr (value, xtarget, 0, false);
5602
              }
5603
            else
5604
              {
5605
                if (index != 0)
5606
                  bitpos = ((tree_low_cst (index, 0) - minelt)
5607
                            * tree_low_cst (TYPE_SIZE (elttype), 1));
5608
                else
5609
                  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5610
 
5611
                if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5612
                    && TREE_CODE (type) == ARRAY_TYPE
5613
                    && TYPE_NONALIASED_COMPONENT (type))
5614
                  {
5615
                    target = copy_rtx (target);
5616
                    MEM_KEEP_ALIAS_SET_P (target) = 1;
5617
                  }
5618
                store_constructor_field (target, bitsize, bitpos, mode, value,
5619
                                         type, cleared, get_alias_set (elttype));
5620
              }
5621
          }
5622
        break;
5623
      }
5624
 
5625
    case VECTOR_TYPE:
5626
      {
5627
        unsigned HOST_WIDE_INT idx;
5628
        constructor_elt *ce;
5629
        int i;
5630
        int need_to_clear;
5631
        int icode = 0;
5632
        tree elttype = TREE_TYPE (type);
5633
        int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5634
        enum machine_mode eltmode = TYPE_MODE (elttype);
5635
        HOST_WIDE_INT bitsize;
5636
        HOST_WIDE_INT bitpos;
5637
        rtvec vector = NULL;
5638
        unsigned n_elts;
5639
        alias_set_type alias;
5640
 
5641
        gcc_assert (eltmode != BLKmode);
5642
 
5643
        n_elts = TYPE_VECTOR_SUBPARTS (type);
5644
        if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5645
          {
5646
            enum machine_mode mode = GET_MODE (target);
5647
 
5648
            icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5649
            if (icode != CODE_FOR_nothing)
5650
              {
5651
                unsigned int i;
5652
 
5653
                vector = rtvec_alloc (n_elts);
5654
                for (i = 0; i < n_elts; i++)
5655
                  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5656
              }
5657
          }
5658
 
5659
        /* If the constructor has fewer elements than the vector,
5660
           clear the whole vector first.  Similarly if this is a static
5661
           constructor of a non-BLKmode object.  */
5662
        if (cleared)
5663
          need_to_clear = 0;
5664
        else if (REG_P (target) && TREE_STATIC (exp))
5665
          need_to_clear = 1;
5666
        else
5667
          {
5668
            unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5669
            tree value;
5670
 
5671
            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5672
              {
5673
                int n_elts_here = tree_low_cst
5674
                  (int_const_binop (TRUNC_DIV_EXPR,
5675
                                    TYPE_SIZE (TREE_TYPE (value)),
5676
                                    TYPE_SIZE (elttype), 0), 1);
5677
 
5678
                count += n_elts_here;
5679
                if (mostly_zeros_p (value))
5680
                  zero_count += n_elts_here;
5681
              }
5682
 
5683
            /* Clear the entire vector first if there are any missing elements,
5684
               or if the incidence of zero elements is >= 75%.  */
5685
            need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5686
          }
5687
 
5688
        if (need_to_clear && size > 0 && !vector)
5689
          {
5690
            if (REG_P (target))
5691
              emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5692
            else
5693
              clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5694
            cleared = 1;
5695
          }
5696
 
5697
        /* Inform later passes that the old value is dead.  */
5698
        if (!cleared && !vector && REG_P (target))
5699
          emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5700
 
5701
        if (MEM_P (target))
5702
          alias = MEM_ALIAS_SET (target);
5703
        else
5704
          alias = get_alias_set (elttype);
5705
 
5706
        /* Store each element of the constructor into the corresponding
5707
           element of TARGET, determined by counting the elements.  */
5708
        for (idx = 0, i = 0;
5709
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5710
             idx++, i += bitsize / elt_size)
5711
          {
5712
            HOST_WIDE_INT eltpos;
5713
            tree value = ce->value;
5714
 
5715
            bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5716
            if (cleared && initializer_zerop (value))
5717
              continue;
5718
 
5719
            if (ce->index)
5720
              eltpos = tree_low_cst (ce->index, 1);
5721
            else
5722
              eltpos = i;
5723
 
5724
            if (vector)
5725
              {
5726
                /* Vector CONSTRUCTORs should only be built from smaller
5727
                   vectors in the case of BLKmode vectors.  */
5728
                gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5729
                RTVEC_ELT (vector, eltpos)
5730
                  = expand_normal (value);
5731
              }
5732
            else
5733
              {
5734
                enum machine_mode value_mode =
5735
                  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5736
                  ? TYPE_MODE (TREE_TYPE (value))
5737
                  : eltmode;
5738
                bitpos = eltpos * elt_size;
5739
                store_constructor_field (target, bitsize, bitpos,
5740
                                         value_mode, value, type,
5741
                                         cleared, alias);
5742
              }
5743
          }
5744
 
5745
        if (vector)
5746
          emit_insn (GEN_FCN (icode)
5747
                     (target,
5748
                      gen_rtx_PARALLEL (GET_MODE (target), vector)));
5749
        break;
5750
      }
5751
 
5752
    default:
5753
      gcc_unreachable ();
5754
    }
5755
}
5756
 
5757
/* Store the value of EXP (an expression tree)
5758
   into a subfield of TARGET which has mode MODE and occupies
5759
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
5760
   If MODE is VOIDmode, it means that we are storing into a bit-field.
5761
 
5762
   Always return const0_rtx unless we have something particular to
5763
   return.
5764
 
5765
   TYPE is the type of the underlying object,
5766
 
5767
   ALIAS_SET is the alias set for the destination.  This value will
5768
   (in general) be different from that for TARGET, since TARGET is a
5769
   reference to the containing structure.
5770
 
5771
   If NONTEMPORAL is true, try generating a nontemporal store.  */
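/* Added illustrative note (commentary, not part of the original source):
   a typical use of store_field is a bit-field store such as

     struct flags { unsigned a : 3; unsigned b : 5; } f;

     void set_b (void) { f.b = 7; }

   where MODE is VOIDmode, BITSIZE is roughly the width of B (5 bits) and
   BITPOS its offset within F, so the bit-field path below is used.  */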
5772
 
5773
static rtx
5774
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5775
             enum machine_mode mode, tree exp, tree type,
5776
             alias_set_type alias_set, bool nontemporal)
5777
{
5778
  if (TREE_CODE (exp) == ERROR_MARK)
5779
    return const0_rtx;
5780
 
5781
  /* If we have nothing to store, do nothing unless the expression has
5782
     side-effects.  */
5783
  if (bitsize == 0)
5784
    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5785
 
5786
  /* If we are storing into an unaligned field of an aligned union that is
5787
     in a register, we may have the mode of TARGET being an integer mode but
5788
     MODE == BLKmode.  In that case, get an aligned object whose size and
5789
     alignment are the same as TARGET and store TARGET into it (we can avoid
5790
     the store if the field being stored is the entire width of TARGET).  Then
5791
     call ourselves recursively to store the field into a BLKmode version of
5792
     that object.  Finally, load from the object into TARGET.  This is not
5793
     very efficient in general, but should only be slightly more expensive
5794
     than the otherwise-required unaligned accesses.  Perhaps this can be
5795
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
5796
     twice, once with emit_move_insn and once via store_field.  */
5797
 
5798
  if (mode == BLKmode
5799
      && (REG_P (target) || GET_CODE (target) == SUBREG))
5800
    {
5801
      rtx object = assign_temp (type, 0, 1, 1);
5802
      rtx blk_object = adjust_address (object, BLKmode, 0);
5803
 
5804
      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5805
        emit_move_insn (object, target);
5806
 
5807
      store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5808
                   nontemporal);
5809
 
5810
      emit_move_insn (target, object);
5811
 
5812
      /* We want to return the BLKmode version of the data.  */
5813
      return blk_object;
5814
    }
5815
 
5816
  if (GET_CODE (target) == CONCAT)
5817
    {
5818
      /* We're storing into a struct containing a single __complex.  */
5819
 
5820
      gcc_assert (!bitpos);
5821
      return store_expr (exp, target, 0, nontemporal);
5822
    }
5823
 
5824
  /* If the structure is in a register or if the component
5825
     is a bit field, we cannot use addressing to access it.
5826
     Use bit-field techniques or SUBREG to store in it.  */
5827
 
5828
  if (mode == VOIDmode
5829
      || (mode != BLKmode && ! direct_store[(int) mode]
5830
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5831
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5832
      || REG_P (target)
5833
      || GET_CODE (target) == SUBREG
5834
      /* If the field isn't aligned enough to store as an ordinary memref,
5835
         store it as a bit field.  */
5836
      || (mode != BLKmode
5837
          && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5838
                || bitpos % GET_MODE_ALIGNMENT (mode))
5839
               && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5840
              || (bitpos % BITS_PER_UNIT != 0)))
5841
      /* If the RHS and field are a constant size and the size of the
5842
         RHS isn't the same size as the bitfield, we must use bitfield
5843
         operations.  */
5844
      || (bitsize >= 0
5845
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5846
          && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5847
    {
5848
      rtx temp;
5849
      gimple nop_def;
5850
 
5851
      /* If EXP is a NOP_EXPR of precision less than its mode, then that
5852
         implies a mask operation.  If the precision is the same size as
5853
         the field we're storing into, that mask is redundant.  This is
5854
         particularly common with bit field assignments generated by the
5855
         C front end.  */
5856
      nop_def = get_def_for_expr (exp, NOP_EXPR);
5857
      if (nop_def)
5858
        {
5859
          tree type = TREE_TYPE (exp);
5860
          if (INTEGRAL_TYPE_P (type)
5861
              && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5862
              && bitsize == TYPE_PRECISION (type))
5863
            {
5864
              tree op = gimple_assign_rhs1 (nop_def);
5865
              type = TREE_TYPE (op);
5866
              if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5867
                exp = op;
5868
            }
5869
        }
5870
 
5871
      temp = expand_normal (exp);
5872
 
5873
      /* If BITSIZE is narrower than the size of the type of EXP
5874
         we will be narrowing TEMP.  Normally, what's wanted are the
5875
         low-order bits.  However, if EXP's type is a record and this is a
         big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5878
          && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5879
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5880
        temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5881
                             size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5882
                                       - bitsize),
5883
                             NULL_RTX, 1);
5884
 
5885
      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5886
         MODE.  */
5887
      if (mode != VOIDmode && mode != BLKmode
5888
          && mode != TYPE_MODE (TREE_TYPE (exp)))
5889
        temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5890
 
5891
      /* If the modes of TEMP and TARGET are both BLKmode, both
5892
         must be in memory and BITPOS must be aligned on a byte
5893
         boundary.  If so, we simply do a block copy.  Likewise
5894
         for a BLKmode-like TARGET.  */
5895
      if (GET_MODE (temp) == BLKmode
5896
          && (GET_MODE (target) == BLKmode
5897
              || (MEM_P (target)
5898
                  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5899
                  && (bitpos % BITS_PER_UNIT) == 0
5900
                  && (bitsize % BITS_PER_UNIT) == 0)))
5901
        {
5902
          gcc_assert (MEM_P (target) && MEM_P (temp)
5903
                      && (bitpos % BITS_PER_UNIT) == 0);
5904
 
5905
          target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5906
          emit_block_move (target, temp,
5907
                           GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5908
                                    / BITS_PER_UNIT),
5909
                           BLOCK_OP_NORMAL);
5910
 
5911
          return const0_rtx;
5912
        }
5913
 
5914
      /* Store the value in the bitfield.  */
5915
      store_bit_field (target, bitsize, bitpos, mode, temp);
5916
 
5917
      return const0_rtx;
5918
    }
5919
  else
5920
    {
5921
      /* Now build a reference to just the desired component.  */
5922
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5923
 
5924
      if (to_rtx == target)
5925
        to_rtx = copy_rtx (to_rtx);
5926
 
5927
      MEM_SET_IN_STRUCT_P (to_rtx, 1);
5928
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5929
        set_mem_alias_set (to_rtx, alias_set);
5930
 
5931
      return store_expr (exp, to_rtx, 0, nontemporal);
5932
    }
5933
}
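
/* Illustrative sketch, not part of the original source: the shift-amount
   computation store_field uses above when narrowing TEMP for a record type
   on a big-endian machine.  Keeping the upper BITSIZE bits of a value that
   occupies MODE_BITS bits means shifting right by MODE_BITS - BITSIZE; for
   example, keeping the top 8 bits of a 32-bit value shifts by 24.  Plain
   host arithmetic, no GCC internals assumed; the helper name is
   hypothetical.  */

static unsigned HOST_WIDE_INT
example_upper_bits (unsigned HOST_WIDE_INT value, int mode_bits, int bitsize)
{
  /* Corresponds to expand_shift (RSHIFT_EXPR, ..., mode_bits - bitsize, ...)
     in store_field, performed directly on a host integer.  */
  return value >> (mode_bits - bitsize);
}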
5934
 
5935
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5936
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5937
   codes and find the ultimate containing object, which we return.
5938
 
5939
   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5940
   bit position, and *PUNSIGNEDP to the signedness of the field.
5941
   If the position of the field is variable, we store a tree
5942
   giving the variable offset (in units) in *POFFSET.
5943
   This offset is in addition to the bit position.
5944
   If the position is not variable, we store 0 in *POFFSET.
5945
 
5946
   If any of the extraction expressions is volatile,
5947
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.
5948
 
5949
   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5950
   Otherwise, it is a mode that can be used to access the field.
5951
 
5952
   If the field describes a variable-sized object, *PMODE is set to
5953
   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
5954
   this case, but the address of the object can be found.
5955
 
5956
   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5957
   look through nodes that serve as markers of a greater alignment than
5958
   the one that can be deduced from the expression.  These nodes make it
5959
   possible for front-ends to prevent temporaries from being created by
5960
   the middle-end on alignment considerations.  For that purpose, the
5961
   normal operating mode at high-level is to always pass FALSE so that
5962
   the ultimate containing object is really returned; moreover, the
5963
   associated predicate handled_component_p will always return TRUE
5964
   on these nodes, thus indicating that they are essentially handled
5965
   by get_inner_reference.  TRUE should only be passed when the caller
5966
   is scanning the expression in order to build another representation
5967
   and specifically knows how to handle these nodes; as such, this is
5968
   the normal operating mode in the RTL expanders.  */
5969
 
5970
tree
5971
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5972
                     HOST_WIDE_INT *pbitpos, tree *poffset,
5973
                     enum machine_mode *pmode, int *punsignedp,
5974
                     int *pvolatilep, bool keep_aligning)
5975
{
5976
  tree size_tree = 0;
5977
  enum machine_mode mode = VOIDmode;
5978
  bool blkmode_bitfield = false;
5979
  tree offset = size_zero_node;
5980
  tree bit_offset = bitsize_zero_node;
5981
 
5982
  /* First get the mode, signedness, and size.  We do this from just the
5983
     outermost expression.  */
5984
  *pbitsize = -1;
5985
  if (TREE_CODE (exp) == COMPONENT_REF)
5986
    {
5987
      tree field = TREE_OPERAND (exp, 1);
5988
      size_tree = DECL_SIZE (field);
5989
      if (!DECL_BIT_FIELD (field))
5990
        mode = DECL_MODE (field);
5991
      else if (DECL_MODE (field) == BLKmode)
5992
        blkmode_bitfield = true;
5993
 
5994
      *punsignedp = DECL_UNSIGNED (field);
5995
    }
5996
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
5997
    {
5998
      size_tree = TREE_OPERAND (exp, 1);
5999
      *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6000
                     || TYPE_UNSIGNED (TREE_TYPE (exp)));
6001
 
6002
      /* For vector types, with the correct size of access, use the mode of
6003
         inner type.  */
6004
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6005
          && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6006
          && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6007
        mode = TYPE_MODE (TREE_TYPE (exp));
6008
    }
6009
  else
6010
    {
6011
      mode = TYPE_MODE (TREE_TYPE (exp));
6012
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6013
 
6014
      if (mode == BLKmode)
6015
        size_tree = TYPE_SIZE (TREE_TYPE (exp));
6016
      else
6017
        *pbitsize = GET_MODE_BITSIZE (mode);
6018
    }
6019
 
6020
  if (size_tree != 0)
6021
    {
6022
      if (! host_integerp (size_tree, 1))
6023
        mode = BLKmode, *pbitsize = -1;
6024
      else
6025
        *pbitsize = tree_low_cst (size_tree, 1);
6026
    }
6027
 
6028
  /* Compute cumulative bit-offset for nested component-refs and array-refs,
6029
     and find the ultimate containing object.  */
6030
  while (1)
6031
    {
6032
      switch (TREE_CODE (exp))
6033
        {
6034
        case BIT_FIELD_REF:
6035
          bit_offset = size_binop (PLUS_EXPR, bit_offset,
6036
                                   TREE_OPERAND (exp, 2));
6037
          break;
6038
 
6039
        case COMPONENT_REF:
6040
          {
6041
            tree field = TREE_OPERAND (exp, 1);
6042
            tree this_offset = component_ref_field_offset (exp);
6043
 
6044
            /* If this field hasn't been filled in yet, don't go past it.
6045
               This should only happen when folding expressions made during
6046
               type construction.  */
6047
            if (this_offset == 0)
6048
              break;
6049
 
6050
            offset = size_binop (PLUS_EXPR, offset, this_offset);
6051
            bit_offset = size_binop (PLUS_EXPR, bit_offset,
6052
                                     DECL_FIELD_BIT_OFFSET (field));
6053
 
6054
            /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
6055
          }
6056
          break;
6057
 
6058
        case ARRAY_REF:
6059
        case ARRAY_RANGE_REF:
6060
          {
6061
            tree index = TREE_OPERAND (exp, 1);
6062
            tree low_bound = array_ref_low_bound (exp);
6063
            tree unit_size = array_ref_element_size (exp);
6064
 
6065
            /* We assume all arrays have sizes that are a multiple of a byte.
6066
               First subtract the lower bound, if any, in the type of the
6067
               index, then convert to sizetype and multiply by the size of
6068
               the array element.  */
6069
            if (! integer_zerop (low_bound))
6070
              index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6071
                                   index, low_bound);
6072
 
6073
            offset = size_binop (PLUS_EXPR, offset,
6074
                                 size_binop (MULT_EXPR,
6075
                                             fold_convert (sizetype, index),
6076
                                             unit_size));
6077
          }
6078
          break;
6079
 
6080
        case REALPART_EXPR:
6081
          break;
6082
 
6083
        case IMAGPART_EXPR:
6084
          bit_offset = size_binop (PLUS_EXPR, bit_offset,
6085
                                   bitsize_int (*pbitsize));
6086
          break;
6087
 
6088
        case VIEW_CONVERT_EXPR:
6089
          if (keep_aligning && STRICT_ALIGNMENT
6090
              && (TYPE_ALIGN (TREE_TYPE (exp))
6091
               > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6092
              && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6093
                  < BIGGEST_ALIGNMENT)
6094
              && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6095
                  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6096
            goto done;
6097
          break;
6098
 
6099
        default:
6100
          goto done;
6101
        }
6102
 
6103
      /* If any reference in the chain is volatile, the effect is volatile.  */
6104
      if (TREE_THIS_VOLATILE (exp))
6105
        *pvolatilep = 1;
6106
 
6107
      exp = TREE_OPERAND (exp, 0);
6108
    }
6109
 done:
6110
 
6111
  /* If OFFSET is constant, see if we can return the whole thing as a
6112
     constant bit position.  Make sure to handle overflow during
6113
     this conversion.  */
6114
  if (host_integerp (offset, 0))
6115
    {
6116
      double_int tem = double_int_mul (tree_to_double_int (offset),
6117
                                       uhwi_to_double_int (BITS_PER_UNIT));
6118
      tem = double_int_add (tem, tree_to_double_int (bit_offset));
6119
      if (double_int_fits_in_shwi_p (tem))
6120
        {
6121
          *pbitpos = double_int_to_shwi (tem);
6122
          *poffset = offset = NULL_TREE;
6123
        }
6124
    }
6125
 
6126
  /* Otherwise, split it up.  */
6127
  if (offset)
6128
    {
6129
      *pbitpos = tree_low_cst (bit_offset, 0);
6130
      *poffset = offset;
6131
    }
6132
 
6133
  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
6134
  if (mode == VOIDmode
6135
      && blkmode_bitfield
6136
      && (*pbitpos % BITS_PER_UNIT) == 0
6137
      && (*pbitsize % BITS_PER_UNIT) == 0)
6138
    *pmode = BLKmode;
6139
  else
6140
    *pmode = mode;
6141
 
6142
  return exp;
6143
}
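
/* Illustrative sketch, not part of the original source: a typical call to
   get_inner_reference as the RTL expanders make it.  It relies only on the
   declarations visible in this file; the helper name is hypothetical.  */

static tree
example_reference_base (tree ref, HOST_WIDE_INT *pbitpos)
{
  HOST_WIDE_INT bitsize;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;
  tree base;

  /* Pass false for KEEP_ALIGNING, as the RTL expanders do, so that
     alignment-marker nodes are looked through and the ultimate containing
     object is returned.  */
  base = get_inner_reference (ref, &bitsize, pbitpos, &offset,
                              &mode, &unsignedp, &volatilep, false);

  /* If OFFSET came back non-null, the position is partly variable and
     *PBITPOS only holds the constant remainder (see the comment above).  */
  return base;
}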
6144
 
6145
/* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
   ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
   EXP are marked as PACKED.  */

bool
contains_packed_reference (const_tree exp)
{
  bool packed_p = false;

  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case COMPONENT_REF:
          {
            tree field = TREE_OPERAND (exp, 1);
            packed_p = DECL_PACKED (field)
                       || TYPE_PACKED (TREE_TYPE (field))
                       || TYPE_PACKED (TREE_TYPE (exp));
            if (packed_p)
              goto done;
          }
          break;

        case BIT_FIELD_REF:
        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;

        default:
          goto done;
        }
      exp = TREE_OPERAND (exp, 0);
    }
 done:
  return packed_p;
}

/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  location_t loc = EXPR_LOCATION (exp);

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
         sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
        aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
                             size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}

/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}

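/* Illustrative sketch, not part of the original source: the array offset
   computation get_inner_reference performs with the two helpers above,
   done on plain host integers.  For an array whose domain starts at 1 and
   whose elements are 8 bytes wide, element 5 sits at byte offset
   (5 - 1) * 8 = 32.  The helper name is hypothetical.  */

static HOST_WIDE_INT
example_array_byte_offset (HOST_WIDE_INT index, HOST_WIDE_INT low_bound,
                           HOST_WIDE_INT unit_size)
{
  /* Mirrors "(index - low_bound) * unit_size" in the ARRAY_REF case of
     get_inner_reference.  */
  return (index - low_bound) * unit_size;
}
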
/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}

/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);
  location_t loc = EXPR_LOCATION (exp);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
         sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
        aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
      return size_binop_loc (loc, MULT_EXPR, aligned_offset,
                             size_int (DECL_OFFSET_ALIGN (field)
                                       / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}

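/* Illustrative sketch, not part of the original source: the scaling that
   component_ref_field_offset applies when a COMPONENT_REF carries an
   explicit operand 2.  With DECL_OFFSET_ALIGN of 128 bits and BITS_PER_UNIT
   of 8, an aligned offset of 3 denotes byte offset 3 * (128 / 8) = 48.
   Plain host arithmetic; the helper name is hypothetical.  */

static HOST_WIDE_INT
example_component_byte_offset (HOST_WIDE_INT aligned_offset,
                               HOST_WIDE_INT offset_align_bits,
                               HOST_WIDE_INT bits_per_unit)
{
  return aligned_offset * (offset_align_bits / bits_per_unit);
}
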
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */

static unsigned HOST_WIDE_INT
target_align (const_tree target)
{
  /* We might have a chain of nested references with intermediate misaligning
     bitfield components, so we need to recurse to find out.  */

  unsigned HOST_WIDE_INT this_align, outer_align;

  switch (TREE_CODE (target))
    {
    case BIT_FIELD_REF:
      return 1;

    case COMPONENT_REF:
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MAX (this_align, outer_align);

    default:
      return TYPE_ALIGN (TREE_TYPE (target));
    }
}


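/* Illustrative sketch, not part of the original source: how target_align
   combines alignments along a reference chain.  A COMPONENT_REF contributes
   the MIN of the field's declared alignment and the alignment of the
   containing object, so an 8-bit-aligned (packed) field inside a
   64-bit-aligned structure yields 8.  Plain host arithmetic; the helper
   name is hypothetical.  */

static unsigned HOST_WIDE_INT
example_component_align (unsigned HOST_WIDE_INT field_align,
                         unsigned HOST_WIDE_INT container_align)
{
  return field_align < container_align ? field_align : container_align;
}
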
/* Given an rtx VALUE that may contain additions and multiplications, return
6322
   an equivalent value that just refers to a register, memory, or constant.
6323
   This is done by generating instructions to perform the arithmetic and
6324
   returning a pseudo-register containing the value.
6325
 
6326
   The returned value may be a REG, SUBREG, MEM or constant.  */
6327
 
6328
rtx
6329
force_operand (rtx value, rtx target)
6330
{
6331
  rtx op1, op2;
6332
  /* Use subtarget as the target for operand 0 of a binary operation.  */
6333
  rtx subtarget = get_subtarget (target);
6334
  enum rtx_code code = GET_CODE (value);
6335
 
6336
  /* Check for subreg applied to an expression produced by loop optimizer.  */
6337
  if (code == SUBREG
6338
      && !REG_P (SUBREG_REG (value))
6339
      && !MEM_P (SUBREG_REG (value)))
6340
    {
6341
      value
6342
        = simplify_gen_subreg (GET_MODE (value),
6343
                               force_reg (GET_MODE (SUBREG_REG (value)),
6344
                                          force_operand (SUBREG_REG (value),
6345
                                                         NULL_RTX)),
6346
                               GET_MODE (SUBREG_REG (value)),
6347
                               SUBREG_BYTE (value));
6348
      code = GET_CODE (value);
6349
    }
6350
 
6351
  /* Check for a PIC address load.  */
6352
  if ((code == PLUS || code == MINUS)
6353
      && XEXP (value, 0) == pic_offset_table_rtx
6354
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6355
          || GET_CODE (XEXP (value, 1)) == LABEL_REF
6356
          || GET_CODE (XEXP (value, 1)) == CONST))
6357
    {
6358
      if (!subtarget)
6359
        subtarget = gen_reg_rtx (GET_MODE (value));
6360
      emit_move_insn (subtarget, value);
6361
      return subtarget;
6362
    }
6363
 
6364
  if (ARITHMETIC_P (value))
6365
    {
6366
      op2 = XEXP (value, 1);
6367
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6368
        subtarget = 0;
6369
      if (code == MINUS && CONST_INT_P (op2))
6370
        {
6371
          code = PLUS;
6372
          op2 = negate_rtx (GET_MODE (value), op2);
6373
        }
6374
 
6375
      /* Check for an addition with OP2 a constant integer and our first
6376
         operand a PLUS of a virtual register and something else.  In that
6377
         case, we want to emit the sum of the virtual register and the
6378
         constant first and then add the other value.  This allows virtual
6379
         register instantiation to simply modify the constant rather than
6380
         creating another one around this addition.  */
6381
      if (code == PLUS && CONST_INT_P (op2)
6382
          && GET_CODE (XEXP (value, 0)) == PLUS
6383
          && REG_P (XEXP (XEXP (value, 0), 0))
6384
          && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6385
          && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6386
        {
6387
          rtx temp = expand_simple_binop (GET_MODE (value), code,
6388
                                          XEXP (XEXP (value, 0), 0), op2,
6389
                                          subtarget, 0, OPTAB_LIB_WIDEN);
6390
          return expand_simple_binop (GET_MODE (value), code, temp,
6391
                                      force_operand (XEXP (XEXP (value,
6392
                                                                 0), 1), 0),
6393
                                      target, 0, OPTAB_LIB_WIDEN);
6394
        }
6395
 
6396
      op1 = force_operand (XEXP (value, 0), subtarget);
6397
      op2 = force_operand (op2, NULL_RTX);
6398
      switch (code)
6399
        {
6400
        case MULT:
6401
          return expand_mult (GET_MODE (value), op1, op2, target, 1);
6402
        case DIV:
6403
          if (!INTEGRAL_MODE_P (GET_MODE (value)))
6404
            return expand_simple_binop (GET_MODE (value), code, op1, op2,
6405
                                        target, 1, OPTAB_LIB_WIDEN);
6406
          else
6407
            return expand_divmod (0,
6408
                                  FLOAT_MODE_P (GET_MODE (value))
6409
                                  ? RDIV_EXPR : TRUNC_DIV_EXPR,
6410
                                  GET_MODE (value), op1, op2, target, 0);
6411
        case MOD:
6412
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6413
                                target, 0);
6414
        case UDIV:
6415
          return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6416
                                target, 1);
6417
        case UMOD:
6418
          return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6419
                                target, 1);
6420
        case ASHIFTRT:
6421
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
6422
                                      target, 0, OPTAB_LIB_WIDEN);
6423
        default:
6424
          return expand_simple_binop (GET_MODE (value), code, op1, op2,
6425
                                      target, 1, OPTAB_LIB_WIDEN);
6426
        }
6427
    }
6428
  if (UNARY_P (value))
6429
    {
6430
      if (!target)
6431
        target = gen_reg_rtx (GET_MODE (value));
6432
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
6433
      switch (code)
6434
        {
6435
        case ZERO_EXTEND:
6436
        case SIGN_EXTEND:
6437
        case TRUNCATE:
6438
        case FLOAT_EXTEND:
6439
        case FLOAT_TRUNCATE:
6440
          convert_move (target, op1, code == ZERO_EXTEND);
6441
          return target;
6442
 
6443
        case FIX:
6444
        case UNSIGNED_FIX:
6445
          expand_fix (target, op1, code == UNSIGNED_FIX);
6446
          return target;
6447
 
6448
        case FLOAT:
6449
        case UNSIGNED_FLOAT:
6450
          expand_float (target, op1, code == UNSIGNED_FLOAT);
6451
          return target;
6452
 
6453
        default:
6454
          return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6455
        }
6456
    }
6457
 
6458
#ifdef INSN_SCHEDULING
6459
  /* On machines that have insn scheduling, we want all memory references to
     be explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6462
      && (GET_MODE_SIZE (GET_MODE (value))
6463
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6464
    value
6465
      = simplify_gen_subreg (GET_MODE (value),
6466
                             force_reg (GET_MODE (SUBREG_REG (value)),
6467
                                        force_operand (SUBREG_REG (value),
6468
                                                       NULL_RTX)),
6469
                             GET_MODE (SUBREG_REG (value)),
6470
                             SUBREG_BYTE (value));
6471
#endif
6472
 
6473
  return value;
6474
}
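
/* Illustrative sketch, not part of the original source: forcing a composite
   address expression into an operand, the way callers of force_operand use
   it.  It relies only on RTL facilities already used in this file; the
   helper name is hypothetical.  */

static rtx
example_force_sum (rtx base_reg, HOST_WIDE_INT displacement)
{
  /* Build (plus base displacement) and let force_operand emit the addition,
     yielding a pseudo register (or a simpler operand if it folds).  */
  rtx sum = gen_rtx_PLUS (GET_MODE (base_reg), base_reg,
                          GEN_INT (displacement));
  return force_operand (sum, NULL_RTX);
}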
6475
 
6476
/* Subroutine of expand_expr: return nonzero iff there is no way that
6477
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
6478
   call is going to be used to determine whether we need a temporary
6479
   for EXP, as opposed to a recursive call to this function.
6480
 
6481
   It is always safe for this routine to return zero since it merely
6482
   searches for optimization opportunities.  */
6483
 
6484
int
6485
safe_from_p (const_rtx x, tree exp, int top_p)
6486
{
6487
  rtx exp_rtl = 0;
6488
  int i, nops;
6489
 
6490
  if (x == 0
6491
      /* If EXP has varying size, we MUST use a target since we currently
6492
         have no way of allocating temporaries of variable size
6493
         (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6494
         So we assume here that something at a higher level has prevented a
6495
         clash.  This is somewhat bogus, but the best we can do.  Only
6496
         do this when X is BLKmode and when we are at the top level.  */
6497
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6498
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6499
          && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6500
              || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6501
              || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6502
              != INTEGER_CST)
6503
          && GET_MODE (x) == BLKmode)
6504
      /* If X is in the outgoing argument area, it is always safe.  */
6505
      || (MEM_P (x)
6506
          && (XEXP (x, 0) == virtual_outgoing_args_rtx
6507
              || (GET_CODE (XEXP (x, 0)) == PLUS
6508
                  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6509
    return 1;
6510
 
6511
  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6512
     find the underlying pseudo.  */
6513
  if (GET_CODE (x) == SUBREG)
6514
    {
6515
      x = SUBREG_REG (x);
6516
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6517
        return 0;
6518
    }
6519
 
6520
  /* Now look at our tree code and possibly recurse.  */
6521
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6522
    {
6523
    case tcc_declaration:
6524
      exp_rtl = DECL_RTL_IF_SET (exp);
6525
      break;
6526
 
6527
    case tcc_constant:
6528
      return 1;
6529
 
6530
    case tcc_exceptional:
6531
      if (TREE_CODE (exp) == TREE_LIST)
6532
        {
6533
          while (1)
6534
            {
6535
              if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6536
                return 0;
6537
              exp = TREE_CHAIN (exp);
6538
              if (!exp)
6539
                return 1;
6540
              if (TREE_CODE (exp) != TREE_LIST)
6541
                return safe_from_p (x, exp, 0);
6542
            }
6543
        }
6544
      else if (TREE_CODE (exp) == CONSTRUCTOR)
6545
        {
6546
          constructor_elt *ce;
6547
          unsigned HOST_WIDE_INT idx;
6548
 
6549
          for (idx = 0;
6550
               VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6551
               idx++)
6552
            if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6553
                || !safe_from_p (x, ce->value, 0))
6554
              return 0;
6555
          return 1;
6556
        }
6557
      else if (TREE_CODE (exp) == ERROR_MARK)
6558
        return 1;       /* An already-visited SAVE_EXPR? */
6559
      else
6560
        return 0;
6561
 
6562
    case tcc_statement:
6563
      /* The only case we look at here is the DECL_INITIAL inside a
6564
         DECL_EXPR.  */
6565
      return (TREE_CODE (exp) != DECL_EXPR
6566
              || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6567
              || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6568
              || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6569
 
6570
    case tcc_binary:
6571
    case tcc_comparison:
6572
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6573
        return 0;
6574
      /* Fall through.  */
6575
 
6576
    case tcc_unary:
6577
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6578
 
6579
    case tcc_expression:
6580
    case tcc_reference:
6581
    case tcc_vl_exp:
6582
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
6583
         the expression.  If it is set, we conflict iff we are that rtx or
6584
         both are in memory.  Otherwise, we check all operands of the
6585
         expression recursively.  */
6586
 
6587
      switch (TREE_CODE (exp))
6588
        {
6589
        case ADDR_EXPR:
6590
          /* If the operand is static or we are static, we can't conflict.
6591
             Likewise if we don't conflict with the operand at all.  */
6592
          if (staticp (TREE_OPERAND (exp, 0))
6593
              || TREE_STATIC (exp)
6594
              || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6595
            return 1;
6596
 
6597
          /* Otherwise, the only way this can conflict is if we are taking
6598
             the address of a DECL a that address if part of X, which is
6599
             very rare.  */
6600
          exp = TREE_OPERAND (exp, 0);
6601
          if (DECL_P (exp))
6602
            {
6603
              if (!DECL_RTL_SET_P (exp)
6604
                  || !MEM_P (DECL_RTL (exp)))
6605
                return 0;
6606
              else
6607
                exp_rtl = XEXP (DECL_RTL (exp), 0);
6608
            }
6609
          break;
6610
 
6611
        case MISALIGNED_INDIRECT_REF:
6612
        case ALIGN_INDIRECT_REF:
6613
        case INDIRECT_REF:
6614
          if (MEM_P (x)
6615
              && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6616
                                        get_alias_set (exp)))
6617
            return 0;
6618
          break;
6619
 
6620
        case CALL_EXPR:
6621
          /* Assume that the call will clobber all hard registers and
6622
             all of memory.  */
6623
          if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6624
              || MEM_P (x))
6625
            return 0;
6626
          break;
6627
 
6628
        case WITH_CLEANUP_EXPR:
6629
        case CLEANUP_POINT_EXPR:
6630
          /* Lowered by gimplify.c.  */
6631
          gcc_unreachable ();
6632
 
6633
        case SAVE_EXPR:
6634
          return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6635
 
6636
        default:
6637
          break;
6638
        }
6639
 
6640
      /* If we have an rtx, we do not need to scan our operands.  */
6641
      if (exp_rtl)
6642
        break;
6643
 
6644
      nops = TREE_OPERAND_LENGTH (exp);
6645
      for (i = 0; i < nops; i++)
6646
        if (TREE_OPERAND (exp, i) != 0
6647
            && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6648
          return 0;
6649
 
6650
      break;
6651
 
6652
    case tcc_type:
6653
      /* Should never get a type here.  */
6654
      gcc_unreachable ();
6655
    }
6656
 
6657
  /* If we have an rtl, find any enclosed object.  Then see if we conflict
6658
     with it.  */
6659
  if (exp_rtl)
6660
    {
6661
      if (GET_CODE (exp_rtl) == SUBREG)
6662
        {
6663
          exp_rtl = SUBREG_REG (exp_rtl);
6664
          if (REG_P (exp_rtl)
6665
              && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6666
            return 0;
6667
        }
6668
 
6669
      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
6670
         are memory and they conflict.  */
6671
      return ! (rtx_equal_p (x, exp_rtl)
6672
                || (MEM_P (x) && MEM_P (exp_rtl)
6673
                    && true_dependence (exp_rtl, VOIDmode, x,
6674
                                        rtx_addr_varies_p)));
6675
    }
6676
 
6677
  /* If we reach here, it is safe.  */
6678
  return 1;
6679
}
6680
 
6681
 
6682
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (const_tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
         HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
         We need to handle this case since we can find it in a COND_EXPR,
         a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
         erroneous program, so return BIGGEST_ALIGNMENT to avoid any
         later ICE.  */
      if (TREE_OVERFLOW (exp))
        return BIGGEST_ALIGNMENT;
      else
        {
          /* Note: tree_low_cst is intentionally not used here,
             we don't care about the upper bits.  */
          c0 = TREE_INT_CST_LOW (exp);
          c0 &= -c0;
          return c0 ? c0 : BIGGEST_ALIGNMENT;
        }
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
          && host_integerp (TREE_OPERAND (exp, 1), 1))
        {
          c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
          c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
          return MAX (1, c0 / c1);
        }
      break;

    case BIT_AND_EXPR:
      /* The highest power of two of a bit-and expression is the maximum of
         that of its operands.  We typically get here for a complex LHS and
         a constant negative power of two on the RHS to force an explicit
         alignment, so don't bother looking at the LHS.  */
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    CASE_CONVERT:
    case SAVE_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}

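/* Illustrative sketch, not part of the original source: the lowest-set-bit
   trick used in the INTEGER_CST case above, applied to a plain host
   integer.  For 24 (binary 11000) the result is 8; the function above maps
   a result of 0 to BIGGEST_ALIGNMENT instead.  The helper name is
   hypothetical.  */

static unsigned HOST_WIDE_INT
example_highest_pow2_factor (unsigned HOST_WIDE_INT c)
{
  /* c & -c isolates the lowest set bit, which is the largest power of two
     dividing c.  */
  return c & -c;
}
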
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);

  return MAX (factor, talign);
}

/* Return &VAR expression for emulated thread local VAR.  */

static tree
emutls_var_address (tree var)
{
  tree emuvar = emutls_decl (var);
  tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
  tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
  tree arglist = build_tree_list (NULL_TREE, arg);
  tree call = build_function_call_expr (UNKNOWN_LOCATION, fn, arglist);
  return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
}


/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
                 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
         temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
        exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}


/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);
  return mem;
}

/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
6829
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
6830
 
6831
static rtx
6832
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6833
                         enum expand_modifier modifier, addr_space_t as)
6834
{
6835
  rtx result, subtarget;
6836
  tree inner, offset;
6837
  HOST_WIDE_INT bitsize, bitpos;
6838
  int volatilep, unsignedp;
6839
  enum machine_mode mode1;
6840
 
6841
  /* If we are taking the address of a constant and are at the top level,
6842
     we have to use output_constant_def since we can't call force_const_mem
6843
     at top level.  */
6844
  /* ??? This should be considered a front-end bug.  We should not be
6845
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
6846
     exception here is STRING_CST.  */
6847
  if (CONSTANT_CLASS_P (exp))
6848
    return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6849
 
6850
  /* Everything must be something allowed by is_gimple_addressable.  */
6851
  switch (TREE_CODE (exp))
6852
    {
6853
    case INDIRECT_REF:
6854
      /* This case will happen via recursion for &a->b.  */
6855
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6856
 
6857
    case CONST_DECL:
6858
      /* Expand the initializer like constants above.  */
6859
      return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
6860
 
6861
    case REALPART_EXPR:
6862
      /* The real part of the complex number is always first, therefore
6863
         the address is the same as the address of the parent object.  */
6864
      offset = 0;
6865
      bitpos = 0;
6866
      inner = TREE_OPERAND (exp, 0);
6867
      break;
6868
 
6869
    case IMAGPART_EXPR:
6870
      /* The imaginary part of the complex number is always second.
6871
         The expression is therefore always offset by the size of the
6872
         scalar type.  */
6873
      offset = 0;
6874
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6875
      inner = TREE_OPERAND (exp, 0);
6876
      break;
6877
 
6878
    case VAR_DECL:
6879
      /* TLS emulation hook - replace __thread VAR's &VAR with
6880
         __emutls_get_address (&_emutls.VAR).  */
6881
      if (! targetm.have_tls
6882
          && TREE_CODE (exp) == VAR_DECL
6883
          && DECL_THREAD_LOCAL_P (exp))
6884
        {
6885
          exp = emutls_var_address (exp);
6886
          return expand_expr (exp, target, tmode, modifier);
6887
        }
6888
      /* Fall through.  */
6889
 
6890
    default:
6891
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
6892
         expand_expr, as that can have various side effects; LABEL_DECLs for
6893
         example, may not have their DECL_RTL set yet.  Expand the rtl of
6894
         CONSTRUCTORs too, which should yield a memory reference for the
6895
         constructor's contents.  Assume language specific tree nodes can
6896
         be expanded in some interesting way.  */
6897
      gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
6898
      if (DECL_P (exp)
6899
          || TREE_CODE (exp) == CONSTRUCTOR
6900
          || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
6901
        {
6902
          result = expand_expr (exp, target, tmode,
6903
                                modifier == EXPAND_INITIALIZER
6904
                                ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6905
 
6906
          /* If the DECL isn't in memory, then the DECL wasn't properly
6907
             marked TREE_ADDRESSABLE, which will be either a front-end
6908
             or a tree optimizer bug.  */
6909
          gcc_assert (MEM_P (result));
6910
          result = XEXP (result, 0);
6911
 
6912
          /* ??? Is this needed anymore?  */
6913
          if (DECL_P (exp) && !TREE_USED (exp) == 0)
6914
            {
6915
              assemble_external (exp);
6916
              TREE_USED (exp) = 1;
6917
            }
6918
 
6919
          if (modifier != EXPAND_INITIALIZER
6920
              && modifier != EXPAND_CONST_ADDRESS)
6921
            result = force_operand (result, target);
6922
          return result;
6923
        }
6924
 
6925
      /* Pass FALSE as the last argument to get_inner_reference although
6926
         we are expanding to RTL.  The rationale is that we know how to
6927
         handle "aligning nodes" here: we can just bypass them because
6928
         they won't change the final object whose address will be returned
6929
         (they actually exist only for that purpose).  */
6930
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6931
                                   &mode1, &unsignedp, &volatilep, false);
6932
      break;
6933
    }
6934
 
6935
  /* We must have made progress.  */
6936
  gcc_assert (inner != exp);
6937
 
6938
  subtarget = offset || bitpos ? NULL_RTX : target;
6939
  /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
6940
     inner alignment, force the inner to be sufficiently aligned.  */
6941
  if (CONSTANT_CLASS_P (inner)
6942
      && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
6943
    {
6944
      inner = copy_node (inner);
6945
      TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
6946
      TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
6947
      TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
6948
    }
6949
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
6950
 
6951
  if (offset)
6952
    {
6953
      rtx tmp;
6954
 
6955
      if (modifier != EXPAND_NORMAL)
6956
        result = force_operand (result, NULL);
6957
      tmp = expand_expr (offset, NULL_RTX, tmode,
6958
                         modifier == EXPAND_INITIALIZER
6959
                          ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6960
 
6961
      result = convert_memory_address_addr_space (tmode, result, as);
6962
      tmp = convert_memory_address_addr_space (tmode, tmp, as);
6963
 
6964
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6965
        result = gen_rtx_PLUS (tmode, result, tmp);
6966
      else
6967
        {
6968
          subtarget = bitpos ? NULL_RTX : target;
6969
          result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6970
                                        1, OPTAB_LIB_WIDEN);
6971
        }
6972
    }
6973
 
6974
  if (bitpos)
6975
    {
6976
      /* Someone beforehand should have rejected taking the address
6977
         of such an object.  */
6978
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6979
 
6980
      result = plus_constant (result, bitpos / BITS_PER_UNIT);
6981
      if (modifier < EXPAND_SUM)
6982
        result = force_operand (result, target);
6983
    }
6984
 
6985
  return result;
6986
}
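
/* Illustrative sketch, not part of the original source: the bit-to-byte
   conversion expand_expr_addr_expr_1 applies above when the address has a
   constant bit position.  Taking the address of the imaginary part of a
   complex value whose scalar mode is 64 bits wide gives bitpos = 64, hence
   a byte offset of 64 / 8 = 8.  Plain host arithmetic; the helper name is
   hypothetical.  */

static HOST_WIDE_INT
example_bitpos_to_bytes (HOST_WIDE_INT bitpos)
{
  /* expand_expr_addr_expr_1 asserts bitpos % BITS_PER_UNIT == 0 first.  */
  return bitpos / BITS_PER_UNIT;
}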
6987
 
6988
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
                       enum expand_modifier modifier)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  enum machine_mode address_mode = Pmode;
  enum machine_mode pointer_mode = ptr_mode;
  enum machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      address_mode = targetm.addr_space.address_mode (as);
      pointer_mode = targetm.addr_space.pointer_mode (as);
    }

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != address_mode && tmode != pointer_mode)
    tmode = address_mode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
                                    tmode, modifier, as);

  /* Despite expand_expr's claims about ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address_addr_space (tmode, result, as);

  return result;
}

/* Generate code for computing CONSTRUCTOR EXP.
7034
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
7035
   is TRUE, instead of creating a temporary variable in memory
7036
   NULL is returned and the caller needs to handle it differently.  */
7037
 
7038
static rtx
7039
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7040
                    bool avoid_temp_mem)
7041
{
7042
  tree type = TREE_TYPE (exp);
7043
  enum machine_mode mode = TYPE_MODE (type);
7044
 
7045
  /* Try to avoid creating a temporary at all.  This is possible
7046
     if all of the initializer is zero.
7047
     FIXME: try to handle all [0..255] initializers we can handle
7048
     with memset.  */
7049
  if (TREE_STATIC (exp)
7050
      && !TREE_ADDRESSABLE (exp)
7051
      && target != 0 && mode == BLKmode
7052
      && all_zeros_p (exp))
7053
    {
7054
      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7055
      return target;
7056
    }
7057
 
7058
  /* All elts simple constants => refer to a constant in memory.  But
7059
     if this is a non-BLKmode mode, let it store a field at a time
7060
     since that should make a CONST_INT or CONST_DOUBLE when we
7061
     fold.  Likewise, if we have a target we can use, it is best to
7062
     store directly into the target unless the type is large enough
7063
     that memcpy will be used.  If we are making an initializer and
7064
     all operands are constant, put it in memory as well.
7065
 
7066
     FIXME: Avoid trying to fill vector constructors piece-meal.
7067
     Output them with output_constant_def below unless we're sure
7068
     they're zeros.  This should go away when vector initializers
7069
     are treated like VECTOR_CST instead of arrays.  */
7070
  if ((TREE_STATIC (exp)
7071
       && ((mode == BLKmode
7072
            && ! (target != 0 && safe_from_p (target, exp, 1)))
7073
                  || TREE_ADDRESSABLE (exp)
7074
                  || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7075
                      && (! MOVE_BY_PIECES_P
7076
                                     (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7077
                                      TYPE_ALIGN (type)))
7078
                      && ! mostly_zeros_p (exp))))
7079
      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7080
          && TREE_CONSTANT (exp)))
7081
    {
7082
      rtx constructor;
7083
 
7084
      if (avoid_temp_mem)
7085
        return NULL_RTX;
7086
 
7087
      constructor = expand_expr_constant (exp, 1, modifier);
7088
 
7089
      if (modifier != EXPAND_CONST_ADDRESS
7090
          && modifier != EXPAND_INITIALIZER
7091
          && modifier != EXPAND_SUM)
7092
        constructor = validize_mem (constructor);
7093
 
7094
      return constructor;
7095
    }
7096
 
7097
  /* Handle calls that pass values in multiple non-contiguous
7098
     locations.  The Irix 6 ABI has examples of this.  */
7099
  if (target == 0 || ! safe_from_p (target, exp, 1)
7100
      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7101
    {
7102
      if (avoid_temp_mem)
7103
        return NULL_RTX;
7104
 
7105
      target
7106
        = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7107
                                                    | (TREE_READONLY (exp)
7108
                                                       * TYPE_QUAL_CONST))),
7109
                       0, TREE_ADDRESSABLE (exp), 1);
7110
    }
7111
 
7112
  store_constructor (exp, target, 0, int_expr_size (exp));
7113
  return target;
7114
}
7115
 
7116
 
7117
/* expand_expr: generate code for computing expression EXP.
7118
   An rtx for the computed value is returned.  The value is never null.
7119
   In the case of a void EXP, const0_rtx is returned.
7120
 
7121
   The value may be stored in TARGET if TARGET is nonzero.
7122
   TARGET is just a suggestion; callers must assume that
7123
   the rtx returned may not be the same as TARGET.
7124
 
7125
   If TARGET is CONST0_RTX, it means that the value will be ignored.
7126
 
7127
   If TMODE is not VOIDmode, it suggests generating the
7128
   result in mode TMODE.  But this is done only when convenient.
7129
   Otherwise, TMODE is ignored and the value generated in its natural mode.
7130
   TMODE is just a suggestion; callers must assume that
7131
   the rtx returned may not have mode TMODE.
7132
 
7133
   Note that TARGET may have neither TMODE nor MODE.  In that case, it
7134
   probably will not be used.
7135
 
7136
   If MODIFIER is EXPAND_SUM then when EXP is an addition
7137
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7138
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
7139
   products as above, or REG or MEM, or constant.
7140
   Ordinarily in such cases we would output mul or add instructions
7141
   and then return a pseudo reg containing the sum.
7142
 
7143
   EXPAND_INITIALIZER is much like EXPAND_SUM except that
7144
   it also marks a label as absolutely required (it can't be dead).
7145
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7146
   This is used for outputting expressions used in initializers.
7147
 
7148
   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7149
   with a constant address even if that address is not normally legitimate.
7150
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7151
 
7152
   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7153
   a call parameter.  Such targets require special care as we haven't yet
7154
   marked TARGET so that it's safe from being trashed by libcalls.  We
7155
   don't want to use TARGET for anything but the final result;
7156
   intermediate values must go elsewhere.  Additionally, calls to
7157
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7158
 
7159
   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7160
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7161
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
7162
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7163
   recursively.  */
7164
 
7165
rtx
7166
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7167
                  enum expand_modifier modifier, rtx *alt_rtl)
7168
{
7169
  rtx ret;
7170
 
7171
  /* Handle ERROR_MARK before anybody tries to access its type.  */
7172
  if (TREE_CODE (exp) == ERROR_MARK
7173
      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7174
    {
7175
      ret = CONST0_RTX (tmode);
7176
      return ret ? ret : const0_rtx;
7177
    }
7178
 
7179
  /* If this is an expression of some kind and it has an associated line
7180
     number, then emit the line number before expanding the expression.
7181
 
7182
     We need to save and restore the file and line information so that
7183
     errors discovered during expansion are emitted with the right
7184
     information.  It would be better if the diagnostic routines
7185
     used the file/line information embedded in the tree nodes rather
7186
     than globals.  */
7187
  if (cfun && EXPR_HAS_LOCATION (exp))
7188
    {
7189
      location_t saved_location = input_location;
7190
      location_t saved_curr_loc = get_curr_insn_source_location ();
7191
      tree saved_block = get_curr_insn_block ();
7192
      input_location = EXPR_LOCATION (exp);
7193
      set_curr_insn_source_location (input_location);
7194
 
7195
      /* Record where the insns produced belong.  */
7196
      set_curr_insn_block (TREE_BLOCK (exp));
7197
 
7198
      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7199
 
7200
      input_location = saved_location;
7201
      set_curr_insn_block (saved_block);
7202
      set_curr_insn_source_location (saved_curr_loc);
7203
    }
7204
  else
7205
    {
7206
      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7207
    }
7208
 
7209
  return ret;
7210
}
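/* Illustrative sketch, not part of the original source: because TARGET is
   only a suggestion, a typical caller compares the rtx that comes back with
   the register it wanted and copies when they differ.  Assuming an SImode
   value and the usual helpers expand_expr, gen_reg_rtx and emit_move_insn,
   a minimal caller might look like:

     rtx want = gen_reg_rtx (SImode);
     rtx got = expand_expr (exp, want, SImode, EXPAND_NORMAL);
     if (got != want)
       emit_move_insn (want, got);

   The surrounding declarations and context are assumed.  */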
7211
 
7212
rtx
7213
expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7214
                    enum expand_modifier modifier)
7215
{
7216
  rtx op0, op1, op2, temp;
7217
  tree type;
7218
  int unsignedp;
7219
  enum machine_mode mode;
7220
  enum tree_code code = ops->code;
7221
  optab this_optab;
7222
  rtx subtarget, original_target;
7223
  int ignore;
7224
  tree subexp0, subexp1;
7225
  bool reduce_bit_field;
7226
  gimple subexp0_def, subexp1_def;
7227
  tree top0, top1;
7228
  location_t loc = ops->location;
7229
  tree treeop0, treeop1;
7230
#define REDUCE_BIT_FIELD(expr)  (reduce_bit_field                         \
7231
                                 ? reduce_to_bit_field_precision ((expr), \
7232
                                                                  target, \
7233
                                                                  type)   \
7234
                                 : (expr))
7235
 
7236
  type = ops->type;
7237
  mode = TYPE_MODE (type);
7238
  unsignedp = TYPE_UNSIGNED (type);
7239
 
7240
  treeop0 = ops->op0;
7241
  treeop1 = ops->op1;
7242
 
7243
  /* We should be called only on simple (binary or unary) expressions,
7244
     exactly those that are valid in gimple expressions that aren't
7245
     GIMPLE_SINGLE_RHS (or invalid).  */
7246
  gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7247
              || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS);
7248
 
7249
  ignore = (target == const0_rtx
7250
            || ((CONVERT_EXPR_CODE_P (code)
7251
                 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7252
                && TREE_CODE (type) == VOID_TYPE));
7253
 
7254
  /* We should be called only if we need the result.  */
7255
  gcc_assert (!ignore);
7256
 
7257
  /* An operation in what may be a bit-field type needs the
7258
     result to be reduced to the precision of the bit-field type,
7259
     which is narrower than that of the type's mode.  */
7260
  reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
7261
                      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
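  /* Illustrative example (an assumption, not from the original source): for

       struct s { unsigned int f : 3; } x;
       x.f = x.f + 6;

     the addition may be carried out in the wider mode of the field's
     container, so the result has to be reduced back to the 3-bit precision
     of the bit-field type before it is used; REDUCE_BIT_FIELD above arranges
     that via reduce_to_bit_field_precision.  */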
7262
 
7263
  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7264
    target = 0;
7265
 
7266
  /* Use subtarget as the target for operand 0 of a binary operation.  */
7267
  subtarget = get_subtarget (target);
7268
  original_target = target;
7269
 
7270
  switch (code)
7271
    {
7272
    case NON_LVALUE_EXPR:
7273
    case PAREN_EXPR:
7274
    CASE_CONVERT:
7275
      if (treeop0 == error_mark_node)
7276
        return const0_rtx;
7277
 
7278
      if (TREE_CODE (type) == UNION_TYPE)
7279
        {
7280
          tree valtype = TREE_TYPE (treeop0);
7281
 
7282
          /* If both input and output are BLKmode, this conversion isn't doing
7283
             anything except possibly changing memory attribute.  */
7284
          if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7285
            {
7286
              rtx result = expand_expr (treeop0, target, tmode,
7287
                                        modifier);
7288
 
7289
              result = copy_rtx (result);
7290
              set_mem_attributes (result, type, 0);
7291
              return result;
7292
            }
7293
 
7294
          if (target == 0)
7295
            {
7296
              if (TYPE_MODE (type) != BLKmode)
7297
                target = gen_reg_rtx (TYPE_MODE (type));
7298
              else
7299
                target = assign_temp (type, 0, 1, 1);
7300
            }
7301
 
7302
          if (MEM_P (target))
7303
            /* Store data into beginning of memory target.  */
7304
            store_expr (treeop0,
7305
                        adjust_address (target, TYPE_MODE (valtype), 0),
7306
                        modifier == EXPAND_STACK_PARM,
7307
                        false);
7308
 
7309
          else
7310
            {
7311
              gcc_assert (REG_P (target));
7312
 
7313
              /* Store this field into a union of the proper type.  */
7314
              store_field (target,
7315
                           MIN ((int_size_in_bytes (TREE_TYPE
7316
                                                    (treeop0))
7317
                                 * BITS_PER_UNIT),
7318
                                (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7319
                           0, TYPE_MODE (valtype), treeop0,
7320
                           type, 0, false);
7321
            }
7322
 
7323
          /* Return the entire union.  */
7324
          return target;
7325
        }
7326
 
7327
      if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7328
        {
7329
          op0 = expand_expr (treeop0, target, VOIDmode,
7330
                             modifier);
7331
 
7332
          /* If the signedness of the conversion differs and OP0 is
7333
             a promoted SUBREG, clear that indication since we now
7334
             have to do the proper extension.  */
7335
          if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7336
              && GET_CODE (op0) == SUBREG)
7337
            SUBREG_PROMOTED_VAR_P (op0) = 0;
7338
 
7339
          return REDUCE_BIT_FIELD (op0);
7340
        }
7341
 
7342
      op0 = expand_expr (treeop0, NULL_RTX, mode,
7343
                         modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7344
      if (GET_MODE (op0) == mode)
7345
        ;
7346
 
7347
      /* If OP0 is a constant, just convert it into the proper mode.  */
7348
      else if (CONSTANT_P (op0))
7349
        {
7350
          tree inner_type = TREE_TYPE (treeop0);
7351
          enum machine_mode inner_mode = TYPE_MODE (inner_type);
7352
 
7353
          if (modifier == EXPAND_INITIALIZER)
7354
            op0 = simplify_gen_subreg (mode, op0, inner_mode,
7355
                                       subreg_lowpart_offset (mode,
7356
                                                              inner_mode));
7357
          else
7358
            op0 = convert_modes (mode, inner_mode, op0,
7359
                                 TYPE_UNSIGNED (inner_type));
7360
        }
7361
 
7362
      else if (modifier == EXPAND_INITIALIZER)
7363
        op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7364
 
7365
      else if (target == 0)
7366
        op0 = convert_to_mode (mode, op0,
7367
                               TYPE_UNSIGNED (TREE_TYPE
7368
                                              (treeop0)));
7369
      else
7370
        {
7371
          convert_move (target, op0,
7372
                        TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7373
          op0 = target;
7374
        }
7375
 
7376
      return REDUCE_BIT_FIELD (op0);
7377
 
7378
    case ADDR_SPACE_CONVERT_EXPR:
7379
      {
7380
        tree treeop0_type = TREE_TYPE (treeop0);
7381
        addr_space_t as_to;
7382
        addr_space_t as_from;
7383
 
7384
        gcc_assert (POINTER_TYPE_P (type));
7385
        gcc_assert (POINTER_TYPE_P (treeop0_type));
7386
 
7387
        as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
7388
        as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
7389
 
7390
        /* Conversions between pointers to the same address space should
7391
           have been implemented via CONVERT_EXPR / NOP_EXPR.  */
7392
        gcc_assert (as_to != as_from);
7393
 
7394
        /* Ask target code to handle conversion between pointers
7395
           to overlapping address spaces.  */
7396
        if (targetm.addr_space.subset_p (as_to, as_from)
7397
            || targetm.addr_space.subset_p (as_from, as_to))
7398
          {
7399
            op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
7400
            op0 = targetm.addr_space.convert (op0, treeop0_type, type);
7401
            gcc_assert (op0);
7402
            return op0;
7403
          }
7404
 
7405
        /* For disjoint address spaces, converting anything but
7406
           a null pointer invokes undefined behaviour.  We simply
7407
           always return a null pointer here.  */
7408
        return CONST0_RTX (mode);
7409
      }
7410
 
7411
    case POINTER_PLUS_EXPR:
7412
      /* Even though the sizetype mode and the pointer's mode can be different
7413
         expand is able to handle this correctly and get the correct result out
7414
         of the PLUS_EXPR code.  */
7415
      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7416
         if sizetype precision is smaller than pointer precision.  */
7417
      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
7418
        treeop1 = fold_convert_loc (loc, type,
7419
                                    fold_convert_loc (loc, ssizetype,
7420
                                                      treeop1));
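      /* Illustrative note (an assumption): on a target where sizetype has 32
         bits and pointers have 64, the sizetype offset produced by "p - 4"
         is 0xfffffffc; converting through ssizetype here sign-extends it to
         -4 in the pointer type instead of widening it to the huge positive
         value 0xfffffffc.  */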
7421
    case PLUS_EXPR:
7422
 
7423
      /* Check if this is a case for multiplication and addition.  */
7424
      if ((TREE_CODE (type) == INTEGER_TYPE
7425
           || TREE_CODE (type) == FIXED_POINT_TYPE)
7426
          && (subexp0_def = get_def_for_expr (treeop0,
7427
                                              MULT_EXPR)))
7428
        {
7429
          tree subsubexp0, subsubexp1;
7430
          gimple subsubexp0_def, subsubexp1_def;
7431
          enum tree_code this_code;
7432
 
7433
          this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
7434
                                                       : FIXED_CONVERT_EXPR;
7435
          subsubexp0 = gimple_assign_rhs1 (subexp0_def);
7436
          subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
7437
          subsubexp1 = gimple_assign_rhs2 (subexp0_def);
7438
          subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
7439
          if (subsubexp0_def && subsubexp1_def
7440
              && (top0 = gimple_assign_rhs1 (subsubexp0_def))
7441
              && (top1 = gimple_assign_rhs1 (subsubexp1_def))
7442
              && (TYPE_PRECISION (TREE_TYPE (top0))
7443
                  < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
7444
              && (TYPE_PRECISION (TREE_TYPE (top0))
7445
                  == TYPE_PRECISION (TREE_TYPE (top1)))
7446
              && (TYPE_UNSIGNED (TREE_TYPE (top0))
7447
                  == TYPE_UNSIGNED (TREE_TYPE (top1))))
7448
            {
7449
              tree op0type = TREE_TYPE (top0);
7450
              enum machine_mode innermode = TYPE_MODE (op0type);
7451
              bool zextend_p = TYPE_UNSIGNED (op0type);
7452
              bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
7453
              if (sat_p == 0)
7454
                this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
7455
              else
7456
                this_optab = zextend_p ? usmadd_widen_optab
7457
                                       : ssmadd_widen_optab;
7458
              if (mode == GET_MODE_2XWIDER_MODE (innermode)
7459
                  && (optab_handler (this_optab, mode)->insn_code
7460
                      != CODE_FOR_nothing))
7461
                {
7462
                  expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7463
                                   EXPAND_NORMAL);
7464
                  op2 = expand_expr (treeop1, subtarget,
7465
                                     VOIDmode, EXPAND_NORMAL);
7466
                  temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
7467
                                            target, unsignedp);
7468
                  gcc_assert (temp);
7469
                  return REDUCE_BIT_FIELD (temp);
7470
                }
7471
            }
7472
        }
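      /* Illustrative example (an assumption): the widening multiply-add path
         above matches source such as

           long long acc;
           int a, b;
           acc = (long long) a * (long long) b + acc;

         on targets whose [us]madd_widen_optab pattern exists for the wider
         mode, so the 32x32->64 multiply and the 64-bit add become a single
         multiply-accumulate instruction.  */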
7473
 
7474
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7475
         something else, make sure we add the register to the constant and
7476
         then to the other thing.  This case can occur during strength
7477
         reduction and doing it this way will produce better code if the
7478
         frame pointer or argument pointer is eliminated.
7479
 
7480
         fold-const.c will ensure that the constant is always in the inner
7481
         PLUS_EXPR, so the only case we need to do anything about is if
7482
         sp, ap, or fp is our second argument, in which case we must swap
7483
         the innermost first argument and our second argument.  */
7484
 
7485
      if (TREE_CODE (treeop0) == PLUS_EXPR
7486
          && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
7487
          && TREE_CODE (treeop1) == VAR_DECL
7488
          && (DECL_RTL (treeop1) == frame_pointer_rtx
7489
              || DECL_RTL (treeop1) == stack_pointer_rtx
7490
              || DECL_RTL (treeop1) == arg_pointer_rtx))
7491
        {
7492
          tree t = treeop1;
7493
 
7494
          treeop1 = TREE_OPERAND (treeop0, 0);
7495
          TREE_OPERAND (treeop0, 0) = t;
7496
        }
7497
 
7498
      /* If the result is to be ptr_mode and we are adding an integer to
7499
         something, we might be forming a constant.  So try to use
7500
         plus_constant.  If it produces a sum and we can't accept it,
7501
         use force_operand.  This allows P = &ARR[const] to generate
7502
         efficient code on machines where a SYMBOL_REF is not a valid
7503
         address.
7504
 
7505
         If this is an EXPAND_SUM call, always return the sum.  */
7506
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7507
          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7508
        {
7509
          if (modifier == EXPAND_STACK_PARM)
7510
            target = 0;
7511
          if (TREE_CODE (treeop0) == INTEGER_CST
7512
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7513
              && TREE_CONSTANT (treeop1))
7514
            {
7515
              rtx constant_part;
7516
 
7517
              op1 = expand_expr (treeop1, subtarget, VOIDmode,
7518
                                 EXPAND_SUM);
7519
              /* Use immed_double_const to ensure that the constant is
7520
                 truncated according to the mode of OP1, then sign extended
7521
                 to a HOST_WIDE_INT.  Using the constant directly can result
7522
                 in non-canonical RTL in a 64x32 cross compile.  */
7523
              constant_part
7524
                = immed_double_const (TREE_INT_CST_LOW (treeop0),
7525
                                      (HOST_WIDE_INT) 0,
7526
                                      TYPE_MODE (TREE_TYPE (treeop1)));
7527
              op1 = plus_constant (op1, INTVAL (constant_part));
7528
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7529
                op1 = force_operand (op1, target);
7530
              return REDUCE_BIT_FIELD (op1);
7531
            }
7532
 
7533
          else if (TREE_CODE (treeop1) == INTEGER_CST
7534
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7535
                   && TREE_CONSTANT (treeop0))
7536
            {
7537
              rtx constant_part;
7538
 
7539
              op0 = expand_expr (treeop0, subtarget, VOIDmode,
7540
                                 (modifier == EXPAND_INITIALIZER
7541
                                 ? EXPAND_INITIALIZER : EXPAND_SUM));
7542
              if (! CONSTANT_P (op0))
7543
                {
7544
                  op1 = expand_expr (treeop1, NULL_RTX,
7545
                                     VOIDmode, modifier);
7546
                  /* Return a PLUS if modifier says it's OK.  */
7547
                  if (modifier == EXPAND_SUM
7548
                      || modifier == EXPAND_INITIALIZER)
7549
                    return simplify_gen_binary (PLUS, mode, op0, op1);
7550
                  goto binop2;
7551
                }
7552
              /* Use immed_double_const to ensure that the constant is
7553
                 truncated according to the mode of OP1, then sign extended
7554
                 to a HOST_WIDE_INT.  Using the constant directly can result
7555
                 in non-canonical RTL in a 64x32 cross compile.  */
7556
              constant_part
7557
                = immed_double_const (TREE_INT_CST_LOW (treeop1),
7558
                                      (HOST_WIDE_INT) 0,
7559
                                      TYPE_MODE (TREE_TYPE (treeop0)));
7560
              op0 = plus_constant (op0, INTVAL (constant_part));
7561
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7562
                op0 = force_operand (op0, target);
7563
              return REDUCE_BIT_FIELD (op0);
7564
            }
7565
        }
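      /* Illustrative example (an assumption, with 4-byte int): for

           static int arr[10];
           int *p = &arr[5];

         op0 expands to (symbol_ref "arr") and plus_constant folds the offset
         into roughly (const (plus (symbol_ref "arr") (const_int 20))), so no
         add instruction is needed even on machines where a bare SYMBOL_REF
         is not a valid address.  */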
7566
 
7567
      /* No sense saving up arithmetic to be done
7568
         if it's all in the wrong mode to form part of an address.
7569
         And force_operand won't know whether to sign-extend or
7570
         zero-extend.  */
7571
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7572
          || mode != ptr_mode)
7573
        {
7574
          expand_operands (treeop0, treeop1,
7575
                           subtarget, &op0, &op1, EXPAND_NORMAL);
7576
          if (op0 == const0_rtx)
7577
            return op1;
7578
          if (op1 == const0_rtx)
7579
            return op0;
7580
          goto binop2;
7581
        }
7582
 
7583
      expand_operands (treeop0, treeop1,
7584
                       subtarget, &op0, &op1, modifier);
7585
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7586
 
7587
    case MINUS_EXPR:
7588
      /* Check if this is a case for multiplication and subtraction.  */
7589
      if ((TREE_CODE (type) == INTEGER_TYPE
7590
           || TREE_CODE (type) == FIXED_POINT_TYPE)
7591
          && (subexp1_def = get_def_for_expr (treeop1,
7592
                                              MULT_EXPR)))
7593
        {
7594
          tree subsubexp0, subsubexp1;
7595
          gimple subsubexp0_def, subsubexp1_def;
7596
          enum tree_code this_code;
7597
 
7598
          this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
7599
                                                       : FIXED_CONVERT_EXPR;
7600
          subsubexp0 = gimple_assign_rhs1 (subexp1_def);
7601
          subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
7602
          subsubexp1 = gimple_assign_rhs2 (subexp1_def);
7603
          subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
7604
          if (subsubexp0_def && subsubexp1_def
7605
              && (top0 = gimple_assign_rhs1 (subsubexp0_def))
7606
              && (top1 = gimple_assign_rhs1 (subsubexp1_def))
7607
              && (TYPE_PRECISION (TREE_TYPE (top0))
7608
                  < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
7609
              && (TYPE_PRECISION (TREE_TYPE (top0))
7610
                  == TYPE_PRECISION (TREE_TYPE (top1)))
7611
              && (TYPE_UNSIGNED (TREE_TYPE (top0))
7612
                  == TYPE_UNSIGNED (TREE_TYPE (top1))))
7613
            {
7614
              tree op0type = TREE_TYPE (top0);
7615
              enum machine_mode innermode = TYPE_MODE (op0type);
7616
              bool zextend_p = TYPE_UNSIGNED (op0type);
7617
              bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
7618
              if (sat_p == 0)
7619
                this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
7620
              else
7621
                this_optab = zextend_p ? usmsub_widen_optab
7622
                                       : ssmsub_widen_optab;
7623
              if (mode == GET_MODE_2XWIDER_MODE (innermode)
7624
                  && (optab_handler (this_optab, mode)->insn_code
7625
                      != CODE_FOR_nothing))
7626
                {
7627
                  expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7628
                                   EXPAND_NORMAL);
7629
                  op2 = expand_expr (treeop0, subtarget,
7630
                                     VOIDmode, EXPAND_NORMAL);
7631
                  temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
7632
                                            target, unsignedp);
7633
                  gcc_assert (temp);
7634
                  return REDUCE_BIT_FIELD (temp);
7635
                }
7636
            }
7637
        }
7638
 
7639
      /* For initializers, we are allowed to return a MINUS of two
7640
         symbolic constants.  Here we handle all cases when both operands
7641
         are constant.  */
7642
      /* Handle difference of two symbolic constants,
7643
         for the sake of an initializer.  */
7644
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7645
          && really_constant_p (treeop0)
7646
          && really_constant_p (treeop1))
7647
        {
7648
          expand_operands (treeop0, treeop1,
7649
                           NULL_RTX, &op0, &op1, modifier);
7650
 
7651
          /* If the last operand is a CONST_INT, use plus_constant of
7652
             the negated constant.  Else make the MINUS.  */
7653
          if (CONST_INT_P (op1))
7654
            return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7655
          else
7656
            return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7657
        }
7658
 
7659
      /* No sense saving up arithmetic to be done
7660
         if it's all in the wrong mode to form part of an address.
7661
         And force_operand won't know whether to sign-extend or
7662
         zero-extend.  */
7663
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7664
          || mode != ptr_mode)
7665
        goto binop;
7666
 
7667
      expand_operands (treeop0, treeop1,
7668
                       subtarget, &op0, &op1, modifier);
7669
 
7670
      /* Convert A - const to A + (-const).  */
7671
      if (CONST_INT_P (op1))
7672
        {
7673
          op1 = negate_rtx (mode, op1);
7674
          return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7675
        }
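      /* Illustrative example (an assumption): "x - 12" is expanded here as
         "x + (-12)"; representing subtraction of a constant as a PLUS of the
         negated constant is the canonical RTL form and lets later passes
         fold it with other address arithmetic.  */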
7676
 
7677
      goto binop2;
7678
 
7679
    case MULT_EXPR:
7680
      /* If this is a fixed-point operation, then we cannot use the code
7681
         below because "expand_mult" doesn't support sat/no-sat fixed-point
7682
         multiplications.   */
7683
      if (ALL_FIXED_POINT_MODE_P (mode))
7684
        goto binop;
7685
 
7686
      /* If first operand is constant, swap them.
7687
         Thus the following special case checks need only
7688
         check the second operand.  */
7689
      if (TREE_CODE (treeop0) == INTEGER_CST)
7690
        {
7691
          tree t1 = treeop0;
7692
          treeop0 = treeop1;
7693
          treeop1 = t1;
7694
        }
7695
 
7696
      /* Attempt to return something suitable for generating an
7697
         indexed address, for machines that support that.  */
7698
 
7699
      if (modifier == EXPAND_SUM && mode == ptr_mode
7700
          && host_integerp (treeop1, 0))
7701
        {
7702
          tree exp1 = treeop1;
7703
 
7704
          op0 = expand_expr (treeop0, subtarget, VOIDmode,
7705
                             EXPAND_SUM);
7706
 
7707
          if (!REG_P (op0))
7708
            op0 = force_operand (op0, NULL_RTX);
7709
          if (!REG_P (op0))
7710
            op0 = copy_to_mode_reg (mode, op0);
7711
 
7712
          return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7713
                               gen_int_mode (tree_low_cst (exp1, 0),
7714
                                             TYPE_MODE (TREE_TYPE (exp1)))));
7715
        }
7716
 
7717
      if (modifier == EXPAND_STACK_PARM)
7718
        target = 0;
7719
 
7720
      /* Check for multiplying things that have been extended
7721
         from a narrower type.  If this machine supports multiplying
7722
         in that narrower type with a result in the desired type,
7723
         do it that way, and avoid the explicit type-conversion.  */
7724
 
7725
      subexp0 = treeop0;
7726
      subexp1 = treeop1;
7727
      subexp0_def = get_def_for_expr (subexp0, NOP_EXPR);
7728
      subexp1_def = get_def_for_expr (subexp1, NOP_EXPR);
7729
      top0 = top1 = NULL_TREE;
7730
 
7731
      /* First, check if we have a multiplication of one signed and one
7732
         unsigned operand.  */
7733
      if (subexp0_def
7734
          && (top0 = gimple_assign_rhs1 (subexp0_def))
7735
          && subexp1_def
7736
          && (top1 = gimple_assign_rhs1 (subexp1_def))
7737
          && TREE_CODE (type) == INTEGER_TYPE
7738
          && (TYPE_PRECISION (TREE_TYPE (top0))
7739
              < TYPE_PRECISION (TREE_TYPE (subexp0)))
7740
          && (TYPE_PRECISION (TREE_TYPE (top0))
7741
              == TYPE_PRECISION (TREE_TYPE (top1)))
7742
          && (TYPE_UNSIGNED (TREE_TYPE (top0))
7743
              != TYPE_UNSIGNED (TREE_TYPE (top1))))
7744
        {
7745
          enum machine_mode innermode
7746
            = TYPE_MODE (TREE_TYPE (top0));
7747
          this_optab = usmul_widen_optab;
7748
          if (mode == GET_MODE_WIDER_MODE (innermode))
7749
            {
7750
              if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
7751
                {
7752
                  if (TYPE_UNSIGNED (TREE_TYPE (top0)))
7753
                    expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7754
                                     EXPAND_NORMAL);
7755
                  else
7756
                    expand_operands (top0, top1, NULL_RTX, &op1, &op0,
7757
                                     EXPAND_NORMAL);
7758
 
7759
                  goto binop3;
7760
                }
7761
            }
7762
        }
7763
      /* Check for a multiplication with matching signedness.  If
7764
         valid, TOP0 and TOP1 were set in the previous if
7765
         condition.  */
7766
      else if (top0
7767
          && TREE_CODE (type) == INTEGER_TYPE
7768
          && (TYPE_PRECISION (TREE_TYPE (top0))
7769
              < TYPE_PRECISION (TREE_TYPE (subexp0)))
7770
          && ((TREE_CODE (subexp1) == INTEGER_CST
7771
               && int_fits_type_p (subexp1, TREE_TYPE (top0))
7772
               /* Don't use a widening multiply if a shift will do.  */
7773
               && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (subexp1)))
7774
                    > HOST_BITS_PER_WIDE_INT)
7775
                   || exact_log2 (TREE_INT_CST_LOW (subexp1)) < 0))
7776
              ||
7777
              (top1
7778
               && (TYPE_PRECISION (TREE_TYPE (top1))
7779
                   == TYPE_PRECISION (TREE_TYPE (top0))
7780
               /* If both operands are extended, they must either both
7781
                  be zero-extended or both be sign-extended.  */
7782
               && (TYPE_UNSIGNED (TREE_TYPE (top1))
7783
                   == TYPE_UNSIGNED (TREE_TYPE (top0)))))))
7784
        {
7785
          tree op0type = TREE_TYPE (top0);
7786
          enum machine_mode innermode = TYPE_MODE (op0type);
7787
          bool zextend_p = TYPE_UNSIGNED (op0type);
7788
          optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7789
          this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7790
 
7791
          if (mode == GET_MODE_2XWIDER_MODE (innermode))
7792
            {
7793
              if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
7794
                {
7795
                  if (TREE_CODE (subexp1) == INTEGER_CST)
7796
                    expand_operands (top0, subexp1, NULL_RTX, &op0, &op1,
7797
                                     EXPAND_NORMAL);
7798
                  else
7799
                    expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7800
                                     EXPAND_NORMAL);
7801
                  goto binop3;
7802
                }
7803
              else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
7804
                       && innermode == word_mode)
7805
                {
7806
                  rtx htem, hipart;
7807
                  op0 = expand_normal (top0);
7808
                  if (TREE_CODE (subexp1) == INTEGER_CST)
7809
                    op1 = convert_modes (innermode, mode,
7810
                                         expand_normal (subexp1), unsignedp);
7811
                  else
7812
                    op1 = expand_normal (top1);
7813
                  temp = expand_binop (mode, other_optab, op0, op1, target,
7814
                                       unsignedp, OPTAB_LIB_WIDEN);
7815
                  hipart = gen_highpart (innermode, temp);
7816
                  htem = expand_mult_highpart_adjust (innermode, hipart,
7817
                                                      op0, op1, hipart,
7818
                                                      zextend_p);
7819
                  if (htem != hipart)
7820
                    emit_move_insn (hipart, htem);
7821
                  return REDUCE_BIT_FIELD (temp);
7822
                }
7823
            }
7824
        }
7825
      expand_operands (subexp0, subexp1, subtarget, &op0, &op1, EXPAND_NORMAL);
7826
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
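      /* Illustrative example (an assumption): the widening-multiply checks
         above match source such as

           unsigned int a, b;
           unsigned long long r = (unsigned long long) a * b;

         so a single umul_widen_optab (e.g. "umulsidi3"-style) pattern can
         produce the 64-bit product directly; when only the opposite-signedness
         pattern exists and the narrow mode is word_mode, the low part is
         computed first and the high part is corrected with
         expand_mult_highpart_adjust.  */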
7827
 
7828
    case TRUNC_DIV_EXPR:
7829
    case FLOOR_DIV_EXPR:
7830
    case CEIL_DIV_EXPR:
7831
    case ROUND_DIV_EXPR:
7832
    case EXACT_DIV_EXPR:
7833
      /* If this is a fixed-point operation, then we cannot use the code
7834
         below because "expand_divmod" doesn't support sat/no-sat fixed-point
7835
         divisions.   */
7836
      if (ALL_FIXED_POINT_MODE_P (mode))
7837
        goto binop;
7838
 
7839
      if (modifier == EXPAND_STACK_PARM)
7840
        target = 0;
7841
      /* Possible optimization: compute the dividend with EXPAND_SUM
7842
         then if the divisor is constant can optimize the case
7843
         where some terms of the dividend have coeffs divisible by it.  */
7844
      expand_operands (treeop0, treeop1,
7845
                       subtarget, &op0, &op1, EXPAND_NORMAL);
7846
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7847
 
7848
    case RDIV_EXPR:
7849
      goto binop;
7850
 
7851
    case TRUNC_MOD_EXPR:
7852
    case FLOOR_MOD_EXPR:
7853
    case CEIL_MOD_EXPR:
7854
    case ROUND_MOD_EXPR:
7855
      if (modifier == EXPAND_STACK_PARM)
7856
        target = 0;
7857
      expand_operands (treeop0, treeop1,
7858
                       subtarget, &op0, &op1, EXPAND_NORMAL);
7859
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7860
 
7861
    case FIXED_CONVERT_EXPR:
7862
      op0 = expand_normal (treeop0);
7863
      if (target == 0 || modifier == EXPAND_STACK_PARM)
7864
        target = gen_reg_rtx (mode);
7865
 
7866
      if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
7867
           && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7868
          || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
7869
        expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
7870
      else
7871
        expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
7872
      return target;
7873
 
7874
    case FIX_TRUNC_EXPR:
7875
      op0 = expand_normal (treeop0);
7876
      if (target == 0 || modifier == EXPAND_STACK_PARM)
7877
        target = gen_reg_rtx (mode);
7878
      expand_fix (target, op0, unsignedp);
7879
      return target;
7880
 
7881
    case FLOAT_EXPR:
7882
      op0 = expand_normal (treeop0);
7883
      if (target == 0 || modifier == EXPAND_STACK_PARM)
7884
        target = gen_reg_rtx (mode);
7885
      /* expand_float can't figure out what to do if FROM has VOIDmode.
7886
         So give it the correct mode.  With -O, cse will optimize this.  */
7887
      if (GET_MODE (op0) == VOIDmode)
7888
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
7889
                                op0);
7890
      expand_float (target, op0,
7891
                    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7892
      return target;
7893
 
7894
    case NEGATE_EXPR:
7895
      op0 = expand_expr (treeop0, subtarget,
7896
                         VOIDmode, EXPAND_NORMAL);
7897
      if (modifier == EXPAND_STACK_PARM)
7898
        target = 0;
7899
      temp = expand_unop (mode,
7900
                          optab_for_tree_code (NEGATE_EXPR, type,
7901
                                               optab_default),
7902
                          op0, target, 0);
7903
      gcc_assert (temp);
7904
      return REDUCE_BIT_FIELD (temp);
7905
 
7906
    case ABS_EXPR:
7907
      op0 = expand_expr (treeop0, subtarget,
7908
                         VOIDmode, EXPAND_NORMAL);
7909
      if (modifier == EXPAND_STACK_PARM)
7910
        target = 0;
7911
 
7912
      /* ABS_EXPR is not valid for complex arguments.  */
7913
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7914
                  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7915
 
7916
      /* Unsigned abs is simply the operand.  Testing here means we don't
7917
         risk generating incorrect code below.  */
7918
      if (TYPE_UNSIGNED (type))
7919
        return op0;
7920
 
7921
      return expand_abs (mode, op0, target, unsignedp,
7922
                         safe_from_p (target, treeop0, 1));
7923
 
7924
    case MAX_EXPR:
7925
    case MIN_EXPR:
7926
      target = original_target;
7927
      if (target == 0
7928
          || modifier == EXPAND_STACK_PARM
7929
          || (MEM_P (target) && MEM_VOLATILE_P (target))
7930
          || GET_MODE (target) != mode
7931
          || (REG_P (target)
7932
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
7933
        target = gen_reg_rtx (mode);
7934
      expand_operands (treeop0, treeop1,
7935
                       target, &op0, &op1, EXPAND_NORMAL);
7936
 
7937
      /* First try to do it with a special MIN or MAX instruction.
7938
         If that does not win, use a conditional jump to select the proper
7939
         value.  */
7940
      this_optab = optab_for_tree_code (code, type, optab_default);
7941
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7942
                           OPTAB_WIDEN);
7943
      if (temp != 0)
7944
        return temp;
7945
 
7946
      /* At this point, a MEM target is no longer useful; we will get better
7947
         code without it.  */
7948
 
7949
      if (! REG_P (target))
7950
        target = gen_reg_rtx (mode);
7951
 
7952
      /* If op1 was placed in target, swap op0 and op1.  */
7953
      if (target != op0 && target == op1)
7954
        {
7955
          temp = op0;
7956
          op0 = op1;
7957
          op1 = temp;
7958
        }
7959
 
7960
      /* We generate better code and avoid problems with op1 mentioning
7961
         target by forcing op1 into a pseudo if it isn't a constant.  */
7962
      if (! CONSTANT_P (op1))
7963
        op1 = force_reg (mode, op1);
7964
 
7965
      {
7966
        enum rtx_code comparison_code;
7967
        rtx cmpop1 = op1;
7968
 
7969
        if (code == MAX_EXPR)
7970
          comparison_code = unsignedp ? GEU : GE;
7971
        else
7972
          comparison_code = unsignedp ? LEU : LE;
7973
 
7974
        /* Canonicalize to comparisons against 0.  */
7975
        if (op1 == const1_rtx)
7976
          {
7977
            /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
7978
               or (a != 0 ? a : 1) for unsigned.
7979
               For MIN we are safe converting (a <= 1 ? a : 1)
7980
               into (a <= 0 ? a : 1)  */
7981
            cmpop1 = const0_rtx;
7982
            if (code == MAX_EXPR)
7983
              comparison_code = unsignedp ? NE : GT;
7984
          }
7985
        if (op1 == constm1_rtx && !unsignedp)
7986
          {
7987
            /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
7988
               and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
7989
            cmpop1 = const0_rtx;
7990
            if (code == MIN_EXPR)
7991
              comparison_code = LT;
7992
          }
7993
#ifdef HAVE_conditional_move
7994
        /* Use a conditional move if possible.  */
7995
        if (can_conditionally_move_p (mode))
7996
          {
7997
            rtx insn;
7998
 
7999
            /* ??? Same problem as in expmed.c: emit_conditional_move
8000
               forces a stack adjustment via compare_from_rtx, and we
8001
               lose the stack adjustment if the sequence we are about
8002
               to create is discarded.  */
8003
            do_pending_stack_adjust ();
8004
 
8005
            start_sequence ();
8006
 
8007
            /* Try to emit the conditional move.  */
8008
            insn = emit_conditional_move (target, comparison_code,
8009
                                          op0, cmpop1, mode,
8010
                                          op0, op1, mode,
8011
                                          unsignedp);
8012
 
8013
            /* If we could do the conditional move, emit the sequence,
8014
               and return.  */
8015
            if (insn)
8016
              {
8017
                rtx seq = get_insns ();
8018
                end_sequence ();
8019
                emit_insn (seq);
8020
                return target;
8021
              }
8022
 
8023
            /* Otherwise discard the sequence and fall back to code with
8024
               branches.  */
8025
            end_sequence ();
8026
          }
8027
#endif
8028
        if (target != op0)
8029
          emit_move_insn (target, op0);
8030
 
8031
        temp = gen_label_rtx ();
8032
        do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8033
                                 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8034
                                 -1);
8035
      }
8036
      emit_move_insn (target, op1);
8037
      emit_label (temp);
8038
      return target;
8039
 
8040
    case BIT_NOT_EXPR:
8041
      op0 = expand_expr (treeop0, subtarget,
8042
                         VOIDmode, EXPAND_NORMAL);
8043
      if (modifier == EXPAND_STACK_PARM)
8044
        target = 0;
8045
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8046
      gcc_assert (temp);
8047
      return temp;
8048
 
8049
      /* ??? Can optimize bitwise operations with one arg constant.
8050
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8051
         and (a bitwise1 b) bitwise2 b (etc)
8052
         but that is probably not worthwhile.  */
8053
 
8054
      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
8055
         boolean values when we want in all cases to compute both of them.  In
8056
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
8057
         as actual zero-or-1 values and then bitwise anding.  In cases where
8058
         there cannot be any side effects, better code would be made by
8059
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8060
         how to recognize those cases.  */
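      /* Illustrative example (an assumption): since both operands are
         evaluated unconditionally here, "c = a && b" with operands already
         known to be 0 or 1 is expanded exactly like "c = a & b": the code is
         rewritten to BIT_AND_EXPR and falls into the common binop path.  */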
8061
 
8062
    case TRUTH_AND_EXPR:
8063
      code = BIT_AND_EXPR;
8064
    case BIT_AND_EXPR:
8065
      goto binop;
8066
 
8067
    case TRUTH_OR_EXPR:
8068
      code = BIT_IOR_EXPR;
8069
    case BIT_IOR_EXPR:
8070
      goto binop;
8071
 
8072
    case TRUTH_XOR_EXPR:
8073
      code = BIT_XOR_EXPR;
8074
    case BIT_XOR_EXPR:
8075
      goto binop;
8076
 
8077
    case LROTATE_EXPR:
8078
    case RROTATE_EXPR:
8079
      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8080
                  || (GET_MODE_PRECISION (TYPE_MODE (type))
8081
                      == TYPE_PRECISION (type)));
8082
      /* fall through */
8083
 
8084
    case LSHIFT_EXPR:
8085
    case RSHIFT_EXPR:
8086
      /* If this is a fixed-point operation, then we cannot use the code
8087
         below because "expand_shift" doesn't support sat/no-sat fixed-point
8088
         shifts.   */
8089
      if (ALL_FIXED_POINT_MODE_P (mode))
8090
        goto binop;
8091
 
8092
      if (! safe_from_p (subtarget, treeop1, 1))
8093
        subtarget = 0;
8094
      if (modifier == EXPAND_STACK_PARM)
8095
        target = 0;
8096
      op0 = expand_expr (treeop0, subtarget,
8097
                         VOIDmode, EXPAND_NORMAL);
8098
      temp = expand_shift (code, mode, op0, treeop1, target,
8099
                           unsignedp);
8100
      if (code == LSHIFT_EXPR)
8101
        temp = REDUCE_BIT_FIELD (temp);
8102
      return temp;
8103
 
8104
      /* Could determine the answer when only additive constants differ.  Also,
8105
         the addition of one can be handled by changing the condition.  */
8106
    case LT_EXPR:
8107
    case LE_EXPR:
8108
    case GT_EXPR:
8109
    case GE_EXPR:
8110
    case EQ_EXPR:
8111
    case NE_EXPR:
8112
    case UNORDERED_EXPR:
8113
    case ORDERED_EXPR:
8114
    case UNLT_EXPR:
8115
    case UNLE_EXPR:
8116
    case UNGT_EXPR:
8117
    case UNGE_EXPR:
8118
    case UNEQ_EXPR:
8119
    case LTGT_EXPR:
8120
      temp = do_store_flag (ops,
8121
                            modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8122
                            tmode != VOIDmode ? tmode : mode);
8123
      if (temp)
8124
        return temp;
8125
 
8126
      /* Use a compare and a jump for BLKmode comparisons, or for function
8127
         type comparisons if HAVE_canonicalize_funcptr_for_compare.  */
8128
 
8129
      if ((target == 0
8130
           || modifier == EXPAND_STACK_PARM
8131
           || ! safe_from_p (target, treeop0, 1)
8132
           || ! safe_from_p (target, treeop1, 1)
8133
           /* Make sure we don't have a hard reg (such as function's return
8134
              value) live across basic blocks, if not optimizing.  */
8135
           || (!optimize && REG_P (target)
8136
               && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8137
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8138
 
8139
      emit_move_insn (target, const0_rtx);
8140
 
8141
      op1 = gen_label_rtx ();
8142
      jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8143
 
8144
      emit_move_insn (target, const1_rtx);
8145
 
8146
      emit_label (op1);
8147
      return target;
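      /* Illustrative sketch (an assumption): when do_store_flag cannot
         handle the comparison, the sequence emitted above is equivalent to

           target = 0;
           if (op0 <cond> op1)
             target = 1;

         realized as a clear of TARGET, a conditional jump around the store
         of 1, and a label.  */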
8148
 
8149
    case TRUTH_NOT_EXPR:
8150
      if (modifier == EXPAND_STACK_PARM)
8151
        target = 0;
8152
      op0 = expand_expr (treeop0, target,
8153
                         VOIDmode, EXPAND_NORMAL);
8154
      /* The parser is careful to generate TRUTH_NOT_EXPR
8155
         only with operands that are always zero or one.  */
8156
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8157
                           target, 1, OPTAB_LIB_WIDEN);
8158
      gcc_assert (temp);
8159
      return temp;
8160
 
8161
    case COMPLEX_EXPR:
8162
      /* Get the rtx code of the operands.  */
8163
      op0 = expand_normal (treeop0);
8164
      op1 = expand_normal (treeop1);
8165
 
8166
      if (!target)
8167
        target = gen_reg_rtx (TYPE_MODE (type));
8168
 
8169
      /* Move the real (op0) and imaginary (op1) parts to their location.  */
8170
      write_complex_part (target, op0, false);
8171
      write_complex_part (target, op1, true);
8172
 
8173
      return target;
8174
 
8175
    case WIDEN_SUM_EXPR:
8176
      {
8177
        tree oprnd0 = treeop0;
8178
        tree oprnd1 = treeop1;
8179
 
8180
        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8181
        target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8182
                                            target, unsignedp);
8183
        return target;
8184
      }
8185
 
8186
    case REDUC_MAX_EXPR:
8187
    case REDUC_MIN_EXPR:
8188
    case REDUC_PLUS_EXPR:
8189
      {
8190
        op0 = expand_normal (treeop0);
8191
        this_optab = optab_for_tree_code (code, type, optab_default);
8192
        temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8193
        gcc_assert (temp);
8194
        return temp;
8195
      }
8196
 
8197
    case VEC_EXTRACT_EVEN_EXPR:
8198
    case VEC_EXTRACT_ODD_EXPR:
8199
      {
8200
        expand_operands (treeop0,  treeop1,
8201
                         NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8202
        this_optab = optab_for_tree_code (code, type, optab_default);
8203
        temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8204
                             OPTAB_WIDEN);
8205
        gcc_assert (temp);
8206
        return temp;
8207
      }
8208
 
8209
    case VEC_INTERLEAVE_HIGH_EXPR:
8210
    case VEC_INTERLEAVE_LOW_EXPR:
8211
      {
8212
        expand_operands (treeop0,  treeop1,
8213
                         NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8214
        this_optab = optab_for_tree_code (code, type, optab_default);
8215
        temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8216
                             OPTAB_WIDEN);
8217
        gcc_assert (temp);
8218
        return temp;
8219
      }
8220
 
8221
    case VEC_LSHIFT_EXPR:
8222
    case VEC_RSHIFT_EXPR:
8223
      {
8224
        target = expand_vec_shift_expr (ops, target);
8225
        return target;
8226
      }
8227
 
8228
    case VEC_UNPACK_HI_EXPR:
8229
    case VEC_UNPACK_LO_EXPR:
8230
      {
8231
        op0 = expand_normal (treeop0);
8232
        this_optab = optab_for_tree_code (code, type, optab_default);
8233
        temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8234
                                          target, unsignedp);
8235
        gcc_assert (temp);
8236
        return temp;
8237
      }
8238
 
8239
    case VEC_UNPACK_FLOAT_HI_EXPR:
8240
    case VEC_UNPACK_FLOAT_LO_EXPR:
8241
      {
8242
        op0 = expand_normal (treeop0);
8243
        /* The signedness is determined from the input operand.  */
8244
        this_optab = optab_for_tree_code (code,
8245
                                          TREE_TYPE (treeop0),
8246
                                          optab_default);
8247
        temp = expand_widen_pattern_expr
8248
          (ops, op0, NULL_RTX, NULL_RTX,
8249
           target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8250
 
8251
        gcc_assert (temp);
8252
        return temp;
8253
      }
8254
 
8255
    case VEC_WIDEN_MULT_HI_EXPR:
8256
    case VEC_WIDEN_MULT_LO_EXPR:
8257
      {
8258
        tree oprnd0 = treeop0;
8259
        tree oprnd1 = treeop1;
8260
 
8261
        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8262
        target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8263
                                            target, unsignedp);
8264
        gcc_assert (target);
8265
        return target;
8266
      }
8267
 
8268
    case VEC_PACK_TRUNC_EXPR:
8269
    case VEC_PACK_SAT_EXPR:
8270
    case VEC_PACK_FIX_TRUNC_EXPR:
8271
      mode = TYPE_MODE (TREE_TYPE (treeop0));
8272
      goto binop;
8273
 
8274
    default:
8275
      gcc_unreachable ();
8276
    }
8277
 
8278
  /* Here to do an ordinary binary operator.  */
8279
 binop:
8280
  expand_operands (treeop0, treeop1,
8281
                   subtarget, &op0, &op1, EXPAND_NORMAL);
8282
 binop2:
8283
  this_optab = optab_for_tree_code (code, type, optab_default);
8284
 binop3:
8285
  if (modifier == EXPAND_STACK_PARM)
8286
    target = 0;
8287
  temp = expand_binop (mode, this_optab, op0, op1, target,
8288
                       unsignedp, OPTAB_LIB_WIDEN);
8289
  gcc_assert (temp);
8290
  return REDUCE_BIT_FIELD (temp);
8291
}
8292
#undef REDUCE_BIT_FIELD
8293
 
8294
rtx
8295
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8296
                    enum expand_modifier modifier, rtx *alt_rtl)
8297
{
8298
  rtx op0, op1, temp, decl_rtl;
8299
  tree type;
8300
  int unsignedp;
8301
  enum machine_mode mode;
8302
  enum tree_code code = TREE_CODE (exp);
8303
  optab this_optab;
8304
  rtx subtarget, original_target;
8305
  int ignore;
8306
  tree context;
8307
  bool reduce_bit_field;
8308
  location_t loc = EXPR_LOCATION (exp);
8309
  struct separate_ops ops;
8310
  tree treeop0, treeop1, treeop2;
8311
 
8312
  type = TREE_TYPE (exp);
8313
  mode = TYPE_MODE (type);
8314
  unsignedp = TYPE_UNSIGNED (type);
8315
 
8316
  treeop0 = treeop1 = treeop2 = NULL_TREE;
8317
  if (!VL_EXP_CLASS_P (exp))
8318
    switch (TREE_CODE_LENGTH (code))
8319
      {
8320
        default:
8321
        case 3: treeop2 = TREE_OPERAND (exp, 2);
8322
        case 2: treeop1 = TREE_OPERAND (exp, 1);
8323
        case 1: treeop0 = TREE_OPERAND (exp, 0);
8324
        case 0: break;
8325
      }
8326
  ops.code = code;
8327
  ops.type = type;
8328
  ops.op0 = treeop0;
8329
  ops.op1 = treeop1;
8330
  ops.op2 = treeop2;
8331
  ops.location = loc;
8332
 
8333
  ignore = (target == const0_rtx
8334
            || ((CONVERT_EXPR_CODE_P (code)
8335
                 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8336
                && TREE_CODE (type) == VOID_TYPE));
8337
 
8338
  /* An operation in what may be a bit-field type needs the
8339
     result to be reduced to the precision of the bit-field type,
8340
     which is narrower than that of the type's mode.  */
8341
  reduce_bit_field = (!ignore
8342
                      && TREE_CODE (type) == INTEGER_TYPE
8343
                      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8344
 
8345
  /* If we are going to ignore this result, we need only do something
8346
     if there is a side-effect somewhere in the expression.  If there
8347
     is, short-circuit the most common cases here.  Note that we must
8348
     not call expand_expr with anything but const0_rtx in case this
8349
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
8350
 
8351
  if (ignore)
8352
    {
8353
      if (! TREE_SIDE_EFFECTS (exp))
8354
        return const0_rtx;
8355
 
8356
      /* Ensure we reference a volatile object even if value is ignored, but
8357
         don't do this if all we are doing is taking its address.  */
8358
      if (TREE_THIS_VOLATILE (exp)
8359
          && TREE_CODE (exp) != FUNCTION_DECL
8360
          && mode != VOIDmode && mode != BLKmode
8361
          && modifier != EXPAND_CONST_ADDRESS)
8362
        {
8363
          temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
8364
          if (MEM_P (temp))
8365
            temp = copy_to_reg (temp);
8366
          return const0_rtx;
8367
        }
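      /* Illustrative example (an assumption): for an expression statement
         such as "v;" where v is a volatile int, the value is discarded but
         the read must still happen; expanding the reference and copying the
         MEM into a register above is what forces the volatile access to be
         emitted.  */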
8368
 
8369
      if (TREE_CODE_CLASS (code) == tcc_unary
8370
          || code == COMPONENT_REF || code == INDIRECT_REF)
8371
        return expand_expr (treeop0, const0_rtx, VOIDmode,
8372
                            modifier);
8373
 
8374
      else if (TREE_CODE_CLASS (code) == tcc_binary
8375
               || TREE_CODE_CLASS (code) == tcc_comparison
8376
               || code == ARRAY_REF || code == ARRAY_RANGE_REF)
8377
        {
8378
          expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8379
          expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8380
          return const0_rtx;
8381
        }
8382
      else if (code == BIT_FIELD_REF)
8383
        {
8384
          expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8385
          expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8386
          expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
8387
          return const0_rtx;
8388
        }
8389
 
8390
      target = 0;
8391
    }
8392
 
8393
  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8394
    target = 0;
8395
 
8396
  /* Use subtarget as the target for operand 0 of a binary operation.  */
8397
  subtarget = get_subtarget (target);
8398
  original_target = target;
8399
 
8400
  switch (code)
8401
    {
8402
    case LABEL_DECL:
8403
      {
8404
        tree function = decl_function_context (exp);
8405
 
8406
        temp = label_rtx (exp);
8407
        temp = gen_rtx_LABEL_REF (Pmode, temp);
8408
 
8409
        if (function != current_function_decl
8410
            && function != 0)
8411
          LABEL_REF_NONLOCAL_P (temp) = 1;
8412
 
8413
        temp = gen_rtx_MEM (FUNCTION_MODE, temp);
8414
        return temp;
8415
      }
8416
 
8417
    case SSA_NAME:
8418
      /* ??? ivopts calls expander, without any preparation from
8419
         out-of-ssa.  So fake instructions as if this was an access to the
8420
         base variable.  This unnecessarily allocates a pseudo; see how we can
8421
         reuse it, if partition base vars have it set already.  */
8422
      if (!currently_expanding_to_rtl)
8423
        return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier, NULL);
8424
      {
8425
        gimple g = get_gimple_for_ssa_name (exp);
8426
        if (g)
8427
          return expand_expr_real (gimple_assign_rhs_to_tree (g), target,
8428
                                   tmode, modifier, NULL);
8429
      }
8430
      decl_rtl = get_rtx_for_ssa_name (exp);
8431
      exp = SSA_NAME_VAR (exp);
8432
      goto expand_decl_rtl;
8433
 
8434
    case PARM_DECL:
8435
    case VAR_DECL:
8436
      /* If a static var's type was incomplete when the decl was written,
8437
         but the type is complete now, lay out the decl now.  */
8438
      if (DECL_SIZE (exp) == 0
8439
          && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
8440
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
8441
        layout_decl (exp, 0);
8442
 
8443
      /* TLS emulation hook - replace __thread vars with
8444
         *__emutls_get_address (&_emutls.var).  */
8445
      if (! targetm.have_tls
8446
          && TREE_CODE (exp) == VAR_DECL
8447
          && DECL_THREAD_LOCAL_P (exp))
8448
        {
8449
          exp = build_fold_indirect_ref_loc (loc, emutls_var_address (exp));
8450
          return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
8451
        }
8452
 
8453
      /* ... fall through ...  */
8454
 
8455
    case FUNCTION_DECL:
8456
    case RESULT_DECL:
8457
      decl_rtl = DECL_RTL (exp);
8458
    expand_decl_rtl:
8459
      gcc_assert (decl_rtl);
8460
      decl_rtl = copy_rtx (decl_rtl);
8461
 
8462
      /* Ensure the variable is marked as used even if it doesn't go through
8463
         a parser.  If it hasn't been used yet, write out an external
8464
         definition.  */
8465
      if (! TREE_USED (exp))
8466
        {
8467
          assemble_external (exp);
8468
          TREE_USED (exp) = 1;
8469
        }
8470
 
8471
      /* Show we haven't gotten RTL for this yet.  */
8472
      temp = 0;
8473
 
8474
      /* Variables inherited from containing functions should have
8475
         been lowered by this point.  */
8476
      context = decl_function_context (exp);
8477
      gcc_assert (!context
8478
                  || context == current_function_decl
8479
                  || TREE_STATIC (exp)
8480
                  /* ??? C++ creates functions that are not TREE_STATIC.  */
8481
                  || TREE_CODE (exp) == FUNCTION_DECL);
8482
 
8483
      /* This is the case of an array whose size is to be determined
8484
         from its initializer, while the initializer is still being parsed.
8485
         See expand_decl.  */
8486
 
8487
      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
8488
        temp = validize_mem (decl_rtl);
8489
 
8490
      /* If DECL_RTL is memory, we are in the normal case, and if the
8491
         address is not valid, get the address into a register.  */
8492
 
8493
      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
8494
        {
8495
          if (alt_rtl)
8496
            *alt_rtl = decl_rtl;
8497
          decl_rtl = use_anchored_address (decl_rtl);
8498
          if (modifier != EXPAND_CONST_ADDRESS
8499
              && modifier != EXPAND_SUM
8500
              && !memory_address_addr_space_p (DECL_MODE (exp),
8501
                                               XEXP (decl_rtl, 0),
8502
                                               MEM_ADDR_SPACE (decl_rtl)))
8503
            temp = replace_equiv_address (decl_rtl,
8504
                                          copy_rtx (XEXP (decl_rtl, 0)));
8505
        }
8506
 
8507
      /* If we got something, return it.  But first, set the alignment
8508
         if the address is a register.  */
8509
      if (temp != 0)
8510
        {
8511
          if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
8512
            mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
8513
 
8514
          return temp;
8515
        }
8516
 
8517
      /* If the mode of DECL_RTL does not match that of the decl, it
8518
         must be a promoted value.  We return a SUBREG of the wanted mode,
8519
         but mark it so that we know that it was already extended.  */
8520
 
8521
      if (REG_P (decl_rtl)
8522
          && GET_MODE (decl_rtl) != DECL_MODE (exp))
8523
        {
8524
          enum machine_mode pmode;
8525
 
8526
          /* Get the signedness used for this variable.  Ensure we get the
8527
             same mode we got when the variable was declared.  */
8528
          pmode = promote_decl_mode (exp, &unsignedp);
8529
          gcc_assert (GET_MODE (decl_rtl) == pmode);
8530
 
8531
          temp = gen_lowpart_SUBREG (mode, decl_rtl);
8532
          SUBREG_PROMOTED_VAR_P (temp) = 1;
8533
          SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
8534
          return temp;
8535
        }
8536
 
8537
      return decl_rtl;
8538
 
8539
    case INTEGER_CST:
8540
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
8541
                                 TREE_INT_CST_HIGH (exp), mode);
8542
 
8543
      return temp;
8544
 
8545
    case VECTOR_CST:
8546
      {
8547
        tree tmp = NULL_TREE;
8548
        if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
8549
            || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
8550
            || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
8551
            || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
8552
            || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
8553
            || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
8554
          return const_vector_from_tree (exp);
8555
        if (GET_MODE_CLASS (mode) == MODE_INT)
8556
          {
8557
            tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
8558
            if (type_for_mode)
8559
              tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
8560
          }
8561
        if (!tmp)
8562
          tmp = build_constructor_from_list (type,
8563
                                             TREE_VECTOR_CST_ELTS (exp));
8564
        return expand_expr (tmp, ignore ? const0_rtx : target,
8565
                            tmode, modifier);
8566
      }
8567
 
8568
    case CONST_DECL:
8569
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
8570
 
8571
    case REAL_CST:
8572
      /* If optimized, generate immediate CONST_DOUBLE
8573
         which will be turned into memory by reload if necessary.
8574
 
8575
         We used to force a register so that loop.c could see it.  But
8576
         this does not allow gen_* patterns to perform optimizations with
8577
         the constants.  It also produces two insns in cases like "x = 1.0;".
8578
         On most machines, floating-point constants are not permitted in
8579
         many insns, so we'd end up copying it to a register in any case.
8580
 
8581
         Now, we do the copying in expand_binop, if appropriate.  */
8582
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
8583
                                           TYPE_MODE (TREE_TYPE (exp)));
8584
 
8585
    case FIXED_CST:
8586
      return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
8587
                                           TYPE_MODE (TREE_TYPE (exp)));
8588
 
8589
    case COMPLEX_CST:
8590
      /* Handle evaluating a complex constant in a CONCAT target.  */
8591
      if (original_target && GET_CODE (original_target) == CONCAT)
8592
        {
8593
          enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8594
          rtx rtarg, itarg;
8595
 
8596
          rtarg = XEXP (original_target, 0);
8597
          itarg = XEXP (original_target, 1);
8598
 
8599
          /* Move the real and imaginary parts separately.  */
8600
          op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
8601
          op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
8602
 
8603
          if (op0 != rtarg)
8604
            emit_move_insn (rtarg, op0);
8605
          if (op1 != itarg)
8606
            emit_move_insn (itarg, op1);
8607
 
8608
          return original_target;
8609
        }
8610
 
8611
      /* ... fall through ...  */
8612
 
8613
    case STRING_CST:
8614
      temp = expand_expr_constant (exp, 1, modifier);
8615
 
8616
      /* temp contains a constant address.
8617
         On RISC machines where a constant address isn't valid,
8618
         make some insns to get that address into a register.  */
8619
      if (modifier != EXPAND_CONST_ADDRESS
8620
          && modifier != EXPAND_INITIALIZER
8621
          && modifier != EXPAND_SUM
8622
          && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
8623
                                            MEM_ADDR_SPACE (temp)))
8624
        return replace_equiv_address (temp,
8625
                                      copy_rtx (XEXP (temp, 0)));
8626
      return temp;
8627
 
8628
    case SAVE_EXPR:
8629
      {
8630
        tree val = treeop0;
8631
        rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
8632
 
8633
        if (!SAVE_EXPR_RESOLVED_P (exp))
8634
          {
8635
            /* We can indeed still hit this case, typically via builtin
8636
               expanders calling save_expr immediately before expanding
8637
               something.  Assume this means that we only have to deal
8638
               with non-BLKmode values.  */
8639
            gcc_assert (GET_MODE (ret) != BLKmode);
8640
 
8641
            val = build_decl (EXPR_LOCATION (exp),
8642
                              VAR_DECL, NULL, TREE_TYPE (exp));
8643
            DECL_ARTIFICIAL (val) = 1;
8644
            DECL_IGNORED_P (val) = 1;
8645
            treeop0 = val;
8646
            TREE_OPERAND (exp, 0) = treeop0;
8647
            SAVE_EXPR_RESOLVED_P (exp) = 1;
8648
 
8649
            if (!CONSTANT_P (ret))
8650
              ret = copy_to_reg (ret);
8651
            SET_DECL_RTL (val, ret);
8652
          }
8653
 
8654
        return ret;
8655
      }
8656
 
8657
 
8658
    case CONSTRUCTOR:
8659
      /* If we don't need the result, just ensure we evaluate any
8660
         subexpressions.  */
8661
      if (ignore)
8662
        {
8663
          unsigned HOST_WIDE_INT idx;
8664
          tree value;
8665
 
8666
          FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
8667
            expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
8668
 
8669
          return const0_rtx;
8670
        }
8671
 
8672
      return expand_constructor (exp, target, modifier, false);
8673
 
8674
    case MISALIGNED_INDIRECT_REF:
8675
    case ALIGN_INDIRECT_REF:
8676
    case INDIRECT_REF:
8677
      {
8678
        tree exp1 = treeop0;
8679
        addr_space_t as = ADDR_SPACE_GENERIC;
8680
        enum machine_mode address_mode = Pmode;
8681
 
8682
        if (modifier != EXPAND_WRITE)
8683
          {
8684
            tree t;
8685
 
8686
            t = fold_read_from_constant_string (exp);
8687
            if (t)
8688
              return expand_expr (t, target, tmode, modifier);
8689
          }
8690
 
8691
        if (POINTER_TYPE_P (TREE_TYPE (exp1)))
8692
          {
8693
            as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp1)));
8694
            address_mode = targetm.addr_space.address_mode (as);
8695
          }
8696
 
8697
        op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
8698
        op0 = memory_address_addr_space (mode, op0, as);
8699
 
8700
        if (code == ALIGN_INDIRECT_REF)
8701
          {
8702
            int align = TYPE_ALIGN_UNIT (type);
8703
            op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align));
8704
            op0 = memory_address_addr_space (mode, op0, as);
8705
          }
8706
 
8707
        temp = gen_rtx_MEM (mode, op0);
8708
 
8709
        set_mem_attributes (temp, exp, 0);
8710
        set_mem_addr_space (temp, as);
8711
 
8712
        /* Resolve the misalignment now, so that we don't have to remember
8713
           to resolve it later.  Of course, this only works for reads.  */
8714
        if (code == MISALIGNED_INDIRECT_REF)
8715
          {
8716
            int icode;
8717
            rtx reg, insn;
8718
 
8719
            gcc_assert (modifier == EXPAND_NORMAL
8720
                        || modifier == EXPAND_STACK_PARM);
8721
 
8722
            /* The vectorizer should have already checked the mode.  */
8723
            icode = optab_handler (movmisalign_optab, mode)->insn_code;
8724
            gcc_assert (icode != CODE_FOR_nothing);
8725
 
8726
            /* We've already validated the memory, and we're creating a
8727
               new pseudo destination.  The predicates really can't fail.  */
8728
            reg = gen_reg_rtx (mode);
8729
 
8730
            /* Nor can the insn generator.  */
8731
            insn = GEN_FCN (icode) (reg, temp);
8732
            emit_insn (insn);
8733
 
8734
            return reg;
8735
          }
8736
 
8737
        return temp;
8738
      }
8739
 
8740
    case TARGET_MEM_REF:
8741
      {
8742
        addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
8743
        struct mem_address addr;
8744
 
8745
        get_address_description (exp, &addr);
8746
        op0 = addr_for_mem_ref (&addr, as, true);
8747
        op0 = memory_address_addr_space (mode, op0, as);
8748
        temp = gen_rtx_MEM (mode, op0);
8749
        set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
8750
        set_mem_addr_space (temp, as);
8751
      }
8752
      return temp;
8753
 
8754
    case ARRAY_REF:
8755
 
8756
      {
8757
        tree array = treeop0;
8758
        tree index = treeop1;
8759
 
8760
        /* Fold an expression like: "foo"[2].
8761
           This is not done in fold so it won't happen inside &.
8762
           Don't fold if this is for wide characters since it's too
8763
           difficult to do correctly and this is a very rare case.  */
8764
 
8765
        if (modifier != EXPAND_CONST_ADDRESS
8766
            && modifier != EXPAND_INITIALIZER
8767
            && modifier != EXPAND_MEMORY)
8768
          {
8769
            tree t = fold_read_from_constant_string (exp);
8770
 
8771
            if (t)
8772
              return expand_expr (t, target, tmode, modifier);
8773
          }
8774
 
8775
        /* If this is a constant index into a constant array,
8776
           just get the value from the array.  Handle both the cases when
8777
           we have an explicit constructor and when our operand is a variable
8778
           that was declared const.  */
8779
 
8780
        if (modifier != EXPAND_CONST_ADDRESS
8781
            && modifier != EXPAND_INITIALIZER
8782
            && modifier != EXPAND_MEMORY
8783
            && TREE_CODE (array) == CONSTRUCTOR
8784
            && ! TREE_SIDE_EFFECTS (array)
8785
            && TREE_CODE (index) == INTEGER_CST)
8786
          {
8787
            unsigned HOST_WIDE_INT ix;
8788
            tree field, value;
8789
 
8790
            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
8791
                                      field, value)
8792
              if (tree_int_cst_equal (field, index))
8793
                {
8794
                  if (!TREE_SIDE_EFFECTS (value))
8795
                    return expand_expr (fold (value), target, tmode, modifier);
8796
                  break;
8797
                }
8798
          }
8799
 
8800
        else if (optimize >= 1
8801
                 && modifier != EXPAND_CONST_ADDRESS
8802
                 && modifier != EXPAND_INITIALIZER
8803
                 && modifier != EXPAND_MEMORY
8804
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8805
                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8806
                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
8807
                 && targetm.binds_local_p (array))
8808
          {
8809
            if (TREE_CODE (index) == INTEGER_CST)
8810
              {
8811
                tree init = DECL_INITIAL (array);
8812
 
8813
                if (TREE_CODE (init) == CONSTRUCTOR)
8814
                  {
8815
                    unsigned HOST_WIDE_INT ix;
8816
                    tree field, value;
8817
 
8818
                    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
8819
                                              field, value)
8820
                      if (tree_int_cst_equal (field, index))
8821
                        {
8822
                          if (TREE_SIDE_EFFECTS (value))
8823
                            break;
8824
 
8825
                          if (TREE_CODE (value) == CONSTRUCTOR)
8826
                            {
8827
                              /* If VALUE is a CONSTRUCTOR, this
8828
                                 optimization is only useful if
8829
                                 this doesn't store the CONSTRUCTOR
8830
                                 into memory.  If it does, it is more
8831
                                 efficient to just load the data from
8832
                                 the array directly.  */
8833
                              rtx ret = expand_constructor (value, target,
8834
                                                            modifier, true);
8835
                              if (ret == NULL_RTX)
8836
                                break;
8837
                            }
8838
 
8839
                          return expand_expr (fold (value), target, tmode,
8840
                                              modifier);
8841
                        }
8842
                  }
8843
                else if (TREE_CODE (init) == STRING_CST)
8844
                  {
8845
                    tree index1 = index;
8846
                    tree low_bound = array_ref_low_bound (exp);
8847
                    index1 = fold_convert_loc (loc, sizetype,
8848
                                               treeop1);
8849
 
8850
                    /* Optimize the special-case of a zero lower bound.
8851
 
8852
                       We convert the low_bound to sizetype to avoid some problems
8853
                       with constant folding.  (E.g. suppose the lower bound is 1,
8854
                       and its mode is QI.  Without the conversion,l (ARRAY
8855
                       +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8856
                       +INDEX), which becomes (ARRAY+255+INDEX).  Opps!)  */
8857
 
8858
                    if (! integer_zerop (low_bound))
8859
                      index1 = size_diffop_loc (loc, index1,
8860
                                            fold_convert_loc (loc, sizetype,
8861
                                                              low_bound));
8862
 
8863
                    if (0 > compare_tree_int (index1,
8864
                                              TREE_STRING_LENGTH (init)))
8865
                      {
8866
                        tree type = TREE_TYPE (TREE_TYPE (init));
8867
                        enum machine_mode mode = TYPE_MODE (type);
8868
 
8869
                        if (GET_MODE_CLASS (mode) == MODE_INT
8870
                            && GET_MODE_SIZE (mode) == 1)
8871
                          return gen_int_mode (TREE_STRING_POINTER (init)
8872
                                               [TREE_INT_CST_LOW (index1)],
8873
                                               mode);
8874
                      }
8875
                  }
8876
              }
8877
          }
8878
      }
8879
      goto normal_inner_ref;
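/* Editorial illustration, compiled out below (not part of the GCC sources):
   a minimal sketch of the source-level situation the constant-array fold
   above targets.  The identifiers are hypothetical.  */
#if 0
static const char hex_digits[] = "0123456789abcdef";

static int
tenth_hex_digit (void)
{
  /* "hex_digits[10]" is an ARRAY_REF with a constant index into a
     read-only, locally bound VAR_DECL whose DECL_INITIAL is a STRING_CST,
     so with -O1 the expander can return the character constant 'a'
     directly instead of emitting a load.  */
  return hex_digits[10];
}
#endif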
8880
 
8881
    case COMPONENT_REF:
8882
      /* If the operand is a CONSTRUCTOR, we can just extract the
8883
         appropriate field if it is present.  */
8884
      if (TREE_CODE (treeop0) == CONSTRUCTOR)
8885
        {
8886
          unsigned HOST_WIDE_INT idx;
8887
          tree field, value;
8888
 
8889
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
8890
                                    idx, field, value)
8891
            if (field == treeop1
8892
                /* We can normally use the value of the field in the
8893
                   CONSTRUCTOR.  However, if this is a bitfield in
8894
                   an integral mode that we can fit in a HOST_WIDE_INT,
8895
                   we must mask only the number of bits in the bitfield,
8896
                   since this is done implicitly by the constructor.  If
8897
                   the bitfield does not meet either of those conditions,
8898
                   we can't do this optimization.  */
8899
                && (! DECL_BIT_FIELD (field)
8900
                    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
8901
                        && (GET_MODE_BITSIZE (DECL_MODE (field))
8902
                            <= HOST_BITS_PER_WIDE_INT))))
8903
              {
8904
                if (DECL_BIT_FIELD (field)
8905
                    && modifier == EXPAND_STACK_PARM)
8906
                  target = 0;
8907
                op0 = expand_expr (value, target, tmode, modifier);
8908
                if (DECL_BIT_FIELD (field))
8909
                  {
8910
                    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
8911
                    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
8912
 
8913
                    if (TYPE_UNSIGNED (TREE_TYPE (field)))
8914
                      {
8915
                        op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
8916
                        op0 = expand_and (imode, op0, op1, target);
8917
                      }
8918
                    else
8919
                      {
8920
                        tree count
8921
                          = build_int_cst (NULL_TREE,
8922
                                           GET_MODE_BITSIZE (imode) - bitsize);
8923
 
8924
                        op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
8925
                                            target, 0);
8926
                        op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
8927
                                            target, 0);
8928
                      }
8929
                  }
8930
 
8931
                return op0;
8932
              }
8933
        }
8934
      goto normal_inner_ref;
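/* Editorial illustration, compiled out below (not part of the GCC sources):
   the masking and shifting applied above when a bit-field value is pulled
   out of a CONSTRUCTOR, written as plain C on a HOST_WIDE_INT.  The function
   names are hypothetical; BITSIZE is the field width and IMODE_BITS the bit
   width of the field's integral mode.  */
#if 0
/* Unsigned field: mask away everything above the field width, as the
   expand_and call above does.  */
static unsigned HOST_WIDE_INT
ctor_field_unsigned (unsigned HOST_WIDE_INT val, int bitsize)
{
  return val & (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1);
}

/* Signed field: shift left so the field's sign bit becomes the mode's sign
   bit, then arithmetic-shift back down, mirroring the LSHIFT_EXPR and
   RSHIFT_EXPR pair above.  */
static HOST_WIDE_INT
ctor_field_signed (HOST_WIDE_INT val, int bitsize, int imode_bits)
{
  int count = imode_bits - bitsize;
  return (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) val << count) >> count;
}
#endif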
8935
 
8936
    case BIT_FIELD_REF:
8937
    case ARRAY_RANGE_REF:
8938
    normal_inner_ref:
8939
      {
8940
        enum machine_mode mode1, mode2;
8941
        HOST_WIDE_INT bitsize, bitpos;
8942
        tree offset;
8943
        int volatilep = 0, must_force_mem;
8944
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8945
                                        &mode1, &unsignedp, &volatilep, true);
8946
        rtx orig_op0, memloc;
8947
 
8948
        /* If we got back the original object, something is wrong.  Perhaps
8949
           we are evaluating an expression too early.  In any event, don't
8950
           infinitely recurse.  */
8951
        gcc_assert (tem != exp);
8952
 
8953
        /* If TEM's type is a union of variable size, pass TARGET to the inner
8954
           computation, since it will need a temporary and TARGET is known
8955
           to be adequate.  This occurs in unchecked conversion in Ada.  */
8956
        orig_op0 = op0
8957
          = expand_expr (tem,
8958
                         (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
8959
                          && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
8960
                              != INTEGER_CST)
8961
                          && modifier != EXPAND_STACK_PARM
8962
                          ? target : NULL_RTX),
8963
                         VOIDmode,
8964
                         (modifier == EXPAND_INITIALIZER
8965
                          || modifier == EXPAND_CONST_ADDRESS
8966
                          || modifier == EXPAND_STACK_PARM)
8967
                         ? modifier : EXPAND_NORMAL);
8968
 
8969
        mode2
8970
          = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
8971
 
8972
        /* If we have either an offset, a BLKmode result, or a reference
8973
           outside the underlying object, we must force it to memory.
8974
           Such a case can occur in Ada if we have unchecked conversion
8975
           of an expression from a scalar type to an aggregate type or
8976
           for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
8977
           passed a partially uninitialized object or a view-conversion
8978
           to a larger size.  */
8979
        must_force_mem = (offset
8980
                          || mode1 == BLKmode
8981
                          || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
8982
 
8983
        /* Handle CONCAT first.  */
8984
        if (GET_CODE (op0) == CONCAT && !must_force_mem)
8985
          {
8986
            if (bitpos == 0
8987
                && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
8988
              return op0;
8989
            if (bitpos == 0
8990
                && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8991
                && bitsize)
8992
              {
8993
                op0 = XEXP (op0, 0);
8994
                mode2 = GET_MODE (op0);
8995
              }
8996
            else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8997
                     && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
8998
                     && bitpos
8999
                     && bitsize)
9000
              {
9001
                op0 = XEXP (op0, 1);
9002
                bitpos = 0;
9003
                mode2 = GET_MODE (op0);
9004
              }
9005
            else
9006
              /* Otherwise force into memory.  */
9007
              must_force_mem = 1;
9008
          }
9009
 
9010
        /* If this is a constant, put it in a register if it is a legitimate
9011
           constant and we don't need a memory reference.  */
9012
        if (CONSTANT_P (op0)
9013
            && mode2 != BLKmode
9014
            && LEGITIMATE_CONSTANT_P (op0)
9015
            && !must_force_mem)
9016
          op0 = force_reg (mode2, op0);
9017
 
9018
        /* Otherwise, if this is a constant, try to force it to the constant
9019
           pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
9020
           is a legitimate constant.  */
9021
        else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9022
          op0 = validize_mem (memloc);
9023
 
9024
        /* Otherwise, if this is a constant or the object is not in memory
9025
           and needs to be, put it there.  */
9026
        else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9027
          {
9028
            tree nt = build_qualified_type (TREE_TYPE (tem),
9029
                                            (TYPE_QUALS (TREE_TYPE (tem))
9030
                                             | TYPE_QUAL_CONST));
9031
            memloc = assign_temp (nt, 1, 1, 1);
9032
            emit_move_insn (memloc, op0);
9033
            op0 = memloc;
9034
          }
9035
 
9036
        if (offset)
9037
          {
9038
            enum machine_mode address_mode;
9039
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9040
                                          EXPAND_SUM);
9041
 
9042
            gcc_assert (MEM_P (op0));
9043
 
9044
            address_mode
9045
              = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
9046
            if (GET_MODE (offset_rtx) != address_mode)
9047
              offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9048
 
9049
            if (GET_MODE (op0) == BLKmode
9050
                /* A constant address in OP0 can have VOIDmode; we must
9051
                   not try to call force_reg in that case.  */
9052
                && GET_MODE (XEXP (op0, 0)) != VOIDmode
9053
                && bitsize != 0
9054
                && (bitpos % bitsize) == 0
9055
                && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9056
                && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9057
              {
9058
                op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9059
                bitpos = 0;
9060
              }
9061
 
9062
            op0 = offset_address (op0, offset_rtx,
9063
                                  highest_pow2_factor (offset));
9064
          }
9065
 
9066
        /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9067
           record its alignment as BIGGEST_ALIGNMENT.  */
9068
        if (MEM_P (op0) && bitpos == 0 && offset != 0
9069
            && is_aligning_offset (offset, tem))
9070
          set_mem_align (op0, BIGGEST_ALIGNMENT);
9071
 
9072
        /* Don't forget about volatility even if this is a bitfield.  */
9073
        if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9074
          {
9075
            if (op0 == orig_op0)
9076
              op0 = copy_rtx (op0);
9077
 
9078
            MEM_VOLATILE_P (op0) = 1;
9079
          }
9080
 
9081
        /* In cases where an aligned union has an unaligned object
9082
           as a field, we might be extracting a BLKmode value from
9083
           an integer-mode (e.g., SImode) object.  Handle this case
9084
           by doing the extract into an object as wide as the field
9085
           (which we know to be the width of a basic mode), then
9086
           storing into memory, and changing the mode to BLKmode.  */
9087
        if (mode1 == VOIDmode
9088
            || REG_P (op0) || GET_CODE (op0) == SUBREG
9089
            || (mode1 != BLKmode && ! direct_load[(int) mode1]
9090
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9091
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9092
                && modifier != EXPAND_CONST_ADDRESS
9093
                && modifier != EXPAND_INITIALIZER)
9094
            /* If the field isn't aligned enough to fetch as a memref,
9095
               fetch it as a bit field.  */
9096
            || (mode1 != BLKmode
9097
                && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9098
                      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9099
                      || (MEM_P (op0)
9100
                          && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9101
                              || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9102
                     && ((modifier == EXPAND_CONST_ADDRESS
9103
                          || modifier == EXPAND_INITIALIZER)
9104
                         ? STRICT_ALIGNMENT
9105
                         : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9106
                    || (bitpos % BITS_PER_UNIT != 0)))
9107
            /* If the type and the field are a constant size and the
9108
               size of the type isn't the same size as the bitfield,
9109
               we must use bitfield operations.  */
9110
            || (bitsize >= 0
9111
                && TYPE_SIZE (TREE_TYPE (exp))
9112
                && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9113
                && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9114
                                          bitsize)))
9115
          {
9116
            enum machine_mode ext_mode = mode;
9117
 
9118
            if (ext_mode == BLKmode
9119
                && ! (target != 0 && MEM_P (op0)
9120
                      && MEM_P (target)
9121
                      && bitpos % BITS_PER_UNIT == 0))
9122
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9123
 
9124
            if (ext_mode == BLKmode)
9125
              {
9126
                if (target == 0)
9127
                  target = assign_temp (type, 0, 1, 1);
9128
 
9129
                if (bitsize == 0)
9130
                  return target;
9131
 
9132
                /* In this case, BITPOS must start at a byte boundary and
9133
                   TARGET, if specified, must be a MEM.  */
9134
                gcc_assert (MEM_P (op0)
9135
                            && (!target || MEM_P (target))
9136
                            && !(bitpos % BITS_PER_UNIT));
9137
 
9138
                emit_block_move (target,
9139
                                 adjust_address (op0, VOIDmode,
9140
                                                 bitpos / BITS_PER_UNIT),
9141
                                 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9142
                                          / BITS_PER_UNIT),
9143
                                 (modifier == EXPAND_STACK_PARM
9144
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9145
 
9146
                return target;
9147
              }
9148
 
9149
            op0 = validize_mem (op0);
9150
 
9151
            if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9152
              mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9153
 
9154
            op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
9155
                                     (modifier == EXPAND_STACK_PARM
9156
                                      ? NULL_RTX : target),
9157
                                     ext_mode, ext_mode);
9158
 
9159
            /* If the result is a record type and BITSIZE is narrower than
9160
               the mode of OP0, an integral mode, and this is a big endian
9161
               machine, we must put the field into the high-order bits.  */
9162
            if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9163
                && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9164
                && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9165
              op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9166
                                  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
9167
                                            - bitsize),
9168
                                  op0, 1);
9169
 
9170
            /* If the result type is BLKmode, store the data into a temporary
9171
               of the appropriate type, but with the mode corresponding to the
9172
               mode for the data we have (op0's mode).  It's tempting to make
9173
               this a constant type, since we know it's only being stored once,
9174
               but that can cause problems if we are taking the address of this
9175
               COMPONENT_REF because the MEM of any reference via that address
9176
               will have flags corresponding to the type, which will not
9177
               necessarily be constant.  */
9178
            if (mode == BLKmode)
9179
              {
9180
                HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9181
                rtx new_rtx;
9182
 
9183
                /* If the reference doesn't use the alias set of its type,
9184
                   we cannot create the temporary using that type.  */
9185
                if (component_uses_parent_alias_set (exp))
9186
                  {
9187
                    new_rtx = assign_stack_local (ext_mode, size, 0);
9188
                    set_mem_alias_set (new_rtx, get_alias_set (exp));
9189
                  }
9190
                else
9191
                  new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9192
 
9193
                emit_move_insn (new_rtx, op0);
9194
                op0 = copy_rtx (new_rtx);
9195
                PUT_MODE (op0, BLKmode);
9196
                set_mem_attributes (op0, exp, 1);
9197
              }
9198
 
9199
            return op0;
9200
          }
9201
 
9202
        /* If the result is BLKmode, use that to access the object
9203
           now as well.  */
9204
        if (mode == BLKmode)
9205
          mode1 = BLKmode;
9206
 
9207
        /* Get a reference to just this component.  */
9208
        if (modifier == EXPAND_CONST_ADDRESS
9209
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9210
          op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9211
        else
9212
          op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9213
 
9214
        if (op0 == orig_op0)
9215
          op0 = copy_rtx (op0);
9216
 
9217
        set_mem_attributes (op0, exp, 0);
9218
        if (REG_P (XEXP (op0, 0)))
9219
          mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9220
 
9221
        MEM_VOLATILE_P (op0) |= volatilep;
9222
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9223
            || modifier == EXPAND_CONST_ADDRESS
9224
            || modifier == EXPAND_INITIALIZER)
9225
          return op0;
9226
        else if (target == 0)
9227
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9228
 
9229
        convert_move (target, op0, unsignedp);
9230
        return target;
9231
      }
9232
 
9233
    case OBJ_TYPE_REF:
9234
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9235
 
9236
    case CALL_EXPR:
9237
      /* All valid uses of __builtin_va_arg_pack () are removed during
9238
         inlining.  */
9239
      if (CALL_EXPR_VA_ARG_PACK (exp))
9240
        error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9241
      {
9242
        tree fndecl = get_callee_fndecl (exp), attr;
9243
 
9244
        if (fndecl
9245
            && (attr = lookup_attribute ("error",
9246
                                         DECL_ATTRIBUTES (fndecl))) != NULL)
9247
          error ("%Kcall to %qs declared with attribute error: %s",
9248
                 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9249
                 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9250
        if (fndecl
9251
            && (attr = lookup_attribute ("warning",
9252
                                         DECL_ATTRIBUTES (fndecl))) != NULL)
9253
          warning_at (tree_nonartificial_location (exp),
9254
                      0, "%Kcall to %qs declared with attribute warning: %s",
9255
                      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9256
                      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9257
 
9258
        /* Check for a built-in function.  */
9259
        if (fndecl && DECL_BUILT_IN (fndecl))
9260
          {
9261
            gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
9262
            return expand_builtin (exp, target, subtarget, tmode, ignore);
9263
          }
9264
      }
9265
      return expand_call (exp, target, ignore);
9266
 
9267
    case VIEW_CONVERT_EXPR:
9268
      op0 = NULL_RTX;
9269
 
9270
      /* If we are converting to BLKmode, try to avoid an intermediate
9271
         temporary by fetching an inner memory reference.  */
9272
      if (mode == BLKmode
9273
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9274
          && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
9275
          && handled_component_p (treeop0))
9276
      {
9277
        enum machine_mode mode1;
9278
        HOST_WIDE_INT bitsize, bitpos;
9279
        tree offset;
9280
        int unsignedp;
9281
        int volatilep = 0;
9282
        tree tem
9283
          = get_inner_reference (treeop0, &bitsize, &bitpos,
9284
                                 &offset, &mode1, &unsignedp, &volatilep,
9285
                                 true);
9286
        rtx orig_op0;
9287
 
9288
        /* ??? We should work harder and deal with non-zero offsets.  */
9289
        if (!offset
9290
            && (bitpos % BITS_PER_UNIT) == 0
9291
            && bitsize >= 0
9292
            && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
9293
          {
9294
            /* See the normal_inner_ref case for the rationale.  */
9295
            orig_op0
9296
              = expand_expr (tem,
9297
                             (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9298
                              && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9299
                                  != INTEGER_CST)
9300
                              && modifier != EXPAND_STACK_PARM
9301
                              ? target : NULL_RTX),
9302
                             VOIDmode,
9303
                             (modifier == EXPAND_INITIALIZER
9304
                              || modifier == EXPAND_CONST_ADDRESS
9305
                              || modifier == EXPAND_STACK_PARM)
9306
                             ? modifier : EXPAND_NORMAL);
9307
 
9308
            if (MEM_P (orig_op0))
9309
              {
9310
                op0 = orig_op0;
9311
 
9312
                /* Get a reference to just this component.  */
9313
                if (modifier == EXPAND_CONST_ADDRESS
9314
                    || modifier == EXPAND_SUM
9315
                    || modifier == EXPAND_INITIALIZER)
9316
                  op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
9317
                else
9318
                  op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
9319
 
9320
                if (op0 == orig_op0)
9321
                  op0 = copy_rtx (op0);
9322
 
9323
                set_mem_attributes (op0, treeop0, 0);
9324
                if (REG_P (XEXP (op0, 0)))
9325
                  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9326
 
9327
                MEM_VOLATILE_P (op0) |= volatilep;
9328
              }
9329
          }
9330
      }
9331
 
9332
      if (!op0)
9333
        op0 = expand_expr (treeop0,
9334
                           NULL_RTX, VOIDmode, modifier);
9335
 
9336
      /* If the input and output modes are both the same, we are done.  */
9337
      if (mode == GET_MODE (op0))
9338
        ;
9339
      /* If neither mode is BLKmode, and both modes are the same size
9340
         then we can use gen_lowpart.  */
9341
      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
9342
               && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
9343
               && !COMPLEX_MODE_P (GET_MODE (op0)))
9344
        {
9345
          if (GET_CODE (op0) == SUBREG)
9346
            op0 = force_reg (GET_MODE (op0), op0);
9347
          op0 = gen_lowpart (mode, op0);
9348
        }
9349
      /* If both modes are integral, then we can convert from one to the
9350
         other.  */
9351
      else if (SCALAR_INT_MODE_P (GET_MODE (op0)) && SCALAR_INT_MODE_P (mode))
9352
        op0 = convert_modes (mode, GET_MODE (op0), op0,
9353
                             TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9354
      /* As a last resort, spill op0 to memory, and reload it in a
9355
         different mode.  */
9356
      else if (!MEM_P (op0))
9357
        {
9358
          /* If the operand is not a MEM, force it into memory.  Since we
9359
             are going to be changing the mode of the MEM, don't call
9360
             force_const_mem for constants because we don't allow pool
9361
             constants to change mode.  */
9362
          tree inner_type = TREE_TYPE (treeop0);
9363
 
9364
          gcc_assert (!TREE_ADDRESSABLE (exp));
9365
 
9366
          if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
9367
            target
9368
              = assign_stack_temp_for_type
9369
                (TYPE_MODE (inner_type),
9370
                 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
9371
 
9372
          emit_move_insn (target, op0);
9373
          op0 = target;
9374
        }
9375
 
9376
      /* At this point, OP0 is in the correct mode.  If the output type is
9377
         such that the operand is known to be aligned, indicate that it is.
9378
         Otherwise, we need only be concerned about alignment for non-BLKmode
9379
         results.  */
9380
      if (MEM_P (op0))
9381
        {
9382
          op0 = copy_rtx (op0);
9383
 
9384
          if (TYPE_ALIGN_OK (type))
9385
            set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
9386
          else if (STRICT_ALIGNMENT
9387
                   && mode != BLKmode
9388
                   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
9389
            {
9390
              tree inner_type = TREE_TYPE (treeop0);
9391
              HOST_WIDE_INT temp_size
9392
                = MAX (int_size_in_bytes (inner_type),
9393
                       (HOST_WIDE_INT) GET_MODE_SIZE (mode));
9394
              rtx new_rtx
9395
                = assign_stack_temp_for_type (mode, temp_size, 0, type);
9396
              rtx new_with_op0_mode
9397
                = adjust_address (new_rtx, GET_MODE (op0), 0);
9398
 
9399
              gcc_assert (!TREE_ADDRESSABLE (exp));
9400
 
9401
              if (GET_MODE (op0) == BLKmode)
9402
                emit_block_move (new_with_op0_mode, op0,
9403
                                 GEN_INT (GET_MODE_SIZE (mode)),
9404
                                 (modifier == EXPAND_STACK_PARM
9405
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9406
              else
9407
                emit_move_insn (new_with_op0_mode, op0);
9408
 
9409
              op0 = new_rtx;
9410
            }
9411
 
9412
          op0 = adjust_address (op0, mode, 0);
9413
        }
9414
 
9415
      return op0;
9416
 
9417
      /* Use a compare and a jump for BLKmode comparisons, or for function
9418
         type comparisons if HAVE_canonicalize_funcptr_for_compare.  */
9419
 
9420
      /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9421
         are occasionally created by folding during expansion.  */
9422
    case TRUTH_ANDIF_EXPR:
9423
    case TRUTH_ORIF_EXPR:
9424
      if (! ignore
9425
          && (target == 0
9426
              || modifier == EXPAND_STACK_PARM
9427
              || ! safe_from_p (target, treeop0, 1)
9428
              || ! safe_from_p (target, treeop1, 1)
9429
              /* Make sure we don't have a hard reg (such as function's return
9430
                 value) live across basic blocks, if not optimizing.  */
9431
              || (!optimize && REG_P (target)
9432
                  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9433
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9434
 
9435
      if (target)
9436
        emit_move_insn (target, const0_rtx);
9437
 
9438
      op1 = gen_label_rtx ();
9439
      jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9440
 
9441
      if (target)
9442
        emit_move_insn (target, const1_rtx);
9443
 
9444
      emit_label (op1);
9445
      return ignore ? const0_rtx : target;
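/* Editorial illustration, compiled out below (not part of the GCC sources):
   the control flow the branch-based TRUTH_ANDIF_EXPR expansion above
   produces, written as ordinary C.  The names are hypothetical stand-ins
   for TARGET and the two operands.  */
#if 0
static int
truth_andif_as_branches (int a, int b)
{
  int target = 0;       /* emit_move_insn (target, const0_rtx)  */
  if (!(a && b))        /* jumpifnot_1 on the two operands      */
    goto done;
  target = 1;           /* emit_move_insn (target, const1_rtx)  */
 done:                  /* emit_label (op1)                     */
  return target;
}
#endif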
9446
 
9447
    case STATEMENT_LIST:
9448
      {
9449
        tree_stmt_iterator iter;
9450
 
9451
        gcc_assert (ignore);
9452
 
9453
        for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9454
          expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9455
      }
9456
      return const0_rtx;
9457
 
9458
    case COND_EXPR:
9459
      /* A COND_EXPR with its type being VOID_TYPE represents a
9460
         conditional jump and is handled in
9461
         expand_gimple_cond_expr.  */
9462
      gcc_assert (!VOID_TYPE_P (type));
9463
 
9464
        /* Note that COND_EXPRs whose type is a structure or union
9465
         are required to be constructed to contain assignments of
9466
         a temporary variable, so that we can evaluate them here
9467
         for side effect only.  If type is void, we must do likewise.  */
9468
 
9469
        gcc_assert (!TREE_ADDRESSABLE (type)
9470
                    && !ignore
9471
                    && TREE_TYPE (treeop1) != void_type_node
9472
                    && TREE_TYPE (treeop2) != void_type_node);
9473
 
9474
       /* If we are not to produce a result, we have no target.  Otherwise,
9475
         if a target was specified, use it; it will not be used as an
9476
         intermediate target unless it is safe.  If no target, use a
9477
         temporary.  */
9478
 
9479
       if (modifier != EXPAND_STACK_PARM
9480
          && original_target
9481
          && safe_from_p (original_target, treeop0, 1)
9482
          && GET_MODE (original_target) == mode
9483
#ifdef HAVE_conditional_move
9484
          && (! can_conditionally_move_p (mode)
9485
              || REG_P (original_target))
9486
#endif
9487
          && !MEM_P (original_target))
9488
        temp = original_target;
9489
       else
9490
        temp = assign_temp (type, 0, 0, 1);
9491
 
9492
       do_pending_stack_adjust ();
9493
       NO_DEFER_POP;
9494
       op0 = gen_label_rtx ();
9495
       op1 = gen_label_rtx ();
9496
       jumpifnot (treeop0, op0, -1);
9497
       store_expr (treeop1, temp,
9498
                  modifier == EXPAND_STACK_PARM,
9499
                  false);
9500
 
9501
       emit_jump_insn (gen_jump (op1));
9502
       emit_barrier ();
9503
       emit_label (op0);
9504
       store_expr (treeop2, temp,
9505
                  modifier == EXPAND_STACK_PARM,
9506
                  false);
9507
 
9508
       emit_label (op1);
9509
       OK_DEFER_POP;
9510
       return temp;
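/* Editorial illustration, compiled out below (not part of the GCC sources):
   the two-label structure emitted above for a value-producing COND_EXPR,
   written as ordinary C.  The names are hypothetical stand-ins for TEMP and
   the three operands.  */
#if 0
static int
cond_expr_as_jumps (int cond, int then_val, int else_val)
{
  int temp;             /* original_target or assign_temp      */
  if (!cond)            /* jumpifnot (treeop0, op0)             */
    goto else_label;
  temp = then_val;      /* store_expr (treeop1, temp, ...)      */
  goto join_label;      /* emit_jump_insn (gen_jump (op1))      */
 else_label:            /* emit_label (op0)                     */
  temp = else_val;      /* store_expr (treeop2, temp, ...)      */
 join_label:            /* emit_label (op1)                     */
  return temp;
}
#endif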
9511
 
9512
    case VEC_COND_EXPR:
9513
      target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9514
      return target;
9515
 
9516
    case MODIFY_EXPR:
9517
      {
9518
        tree lhs = treeop0;
9519
        tree rhs = treeop1;
9520
        gcc_assert (ignore);
9521
 
9522
        /* Check for |= or &= of a bitfield of size one into another bitfield
9523
           of size 1.  In this case, (unless we need the result of the
9524
           assignment) we can do this more efficiently with a
9525
           test followed by an assignment, if necessary.
9526
 
9527
           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
9528
           things change so we do, this code should be enhanced to
9529
           support it.  */
9530
        if (TREE_CODE (lhs) == COMPONENT_REF
9531
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
9532
                || TREE_CODE (rhs) == BIT_AND_EXPR)
9533
            && TREE_OPERAND (rhs, 0) == lhs
9534
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9535
            && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9536
            && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9537
          {
9538
            rtx label = gen_label_rtx ();
9539
            int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9540
            do_jump (TREE_OPERAND (rhs, 1),
9541
                     value ? label : 0,
9542
                     value ? 0 : label, -1);
9543
            expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9544
                               MOVE_NONTEMPORAL (exp));
9545
            do_pending_stack_adjust ();
9546
            emit_label (label);
9547
            return const0_rtx;
9548
          }
9549
 
9550
        expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9551
        return const0_rtx;
9552
      }
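/* Editorial illustration, compiled out below (not part of the GCC sources):
   the test-then-assign rewrite described above for |= of two one-bit
   bit-fields, written at the source level.  The struct and names are
   hypothetical.  */
#if 0
struct two_flags { unsigned int a : 1; unsigned int b : 1; };

static void
bitfield_or_assign (struct two_flags *dst, const struct two_flags *src)
{
  /* Source form: dst->a |= src->b;  Instead of a read-modify-write of the
     destination bit, the expander emits a jump on the right-hand bit and a
     plain store of the constant 1 only when that bit is set.  */
  if (src->b)
    dst->a = 1;
}
#endif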
9553
 
9554
    case ADDR_EXPR:
9555
      return expand_expr_addr_expr (exp, target, tmode, modifier);
9556
 
9557
    case REALPART_EXPR:
9558
      op0 = expand_normal (treeop0);
9559
      return read_complex_part (op0, false);
9560
 
9561
    case IMAGPART_EXPR:
9562
      op0 = expand_normal (treeop0);
9563
      return read_complex_part (op0, true);
9564
 
9565
    case RETURN_EXPR:
9566
    case LABEL_EXPR:
9567
    case GOTO_EXPR:
9568
    case SWITCH_EXPR:
9569
    case ASM_EXPR:
9570
      /* Expanded in cfgexpand.c.  */
9571
      gcc_unreachable ();
9572
 
9573
    case TRY_CATCH_EXPR:
9574
    case CATCH_EXPR:
9575
    case EH_FILTER_EXPR:
9576
    case TRY_FINALLY_EXPR:
9577
      /* Lowered by tree-eh.c.  */
9578
      gcc_unreachable ();
9579
 
9580
    case WITH_CLEANUP_EXPR:
9581
    case CLEANUP_POINT_EXPR:
9582
    case TARGET_EXPR:
9583
    case CASE_LABEL_EXPR:
9584
    case VA_ARG_EXPR:
9585
    case BIND_EXPR:
9586
    case INIT_EXPR:
9587
    case CONJ_EXPR:
9588
    case COMPOUND_EXPR:
9589
    case PREINCREMENT_EXPR:
9590
    case PREDECREMENT_EXPR:
9591
    case POSTINCREMENT_EXPR:
9592
    case POSTDECREMENT_EXPR:
9593
    case LOOP_EXPR:
9594
    case EXIT_EXPR:
9595
      /* Lowered by gimplify.c.  */
9596
      gcc_unreachable ();
9597
 
9598
    case FDESC_EXPR:
9599
      /* Function descriptors are not valid except as
9600
         initialization constants, and should not be expanded.  */
9601
      gcc_unreachable ();
9602
 
9603
    case WITH_SIZE_EXPR:
9604
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
9605
         have pulled out the size to use in whatever context it needed.  */
9606
      return expand_expr_real (treeop0, original_target, tmode,
9607
                               modifier, alt_rtl);
9608
 
9609
    case REALIGN_LOAD_EXPR:
9610
      {
9611
        tree oprnd0 = treeop0;
9612
        tree oprnd1 = treeop1;
9613
        tree oprnd2 = treeop2;
9614
        rtx op2;
9615
 
9616
        this_optab = optab_for_tree_code (code, type, optab_default);
9617
        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9618
        op2 = expand_normal (oprnd2);
9619
        temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9620
                                  target, unsignedp);
9621
        gcc_assert (temp);
9622
        return temp;
9623
      }
9624
 
9625
    case DOT_PROD_EXPR:
9626
      {
9627
        tree oprnd0 = treeop0;
9628
        tree oprnd1 = treeop1;
9629
        tree oprnd2 = treeop2;
9630
        rtx op2;
9631
 
9632
        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9633
        op2 = expand_normal (oprnd2);
9634
        target = expand_widen_pattern_expr (&ops, op0, op1, op2,
9635
                                            target, unsignedp);
9636
        return target;
9637
      }
9638
 
9639
    case COMPOUND_LITERAL_EXPR:
9640
      {
9641
        /* Initialize the anonymous variable declared in the compound
9642
           literal, then return the variable.  */
9643
        tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9644
 
9645
        /* Create RTL for this variable.  */
9646
        if (!DECL_RTL_SET_P (decl))
9647
          {
9648
            if (DECL_HARD_REGISTER (decl))
9649
              /* The user specified an assembler name for this variable.
9650
                 Set that up now.  */
9651
              rest_of_decl_compilation (decl, 0, 0);
9652
            else
9653
              expand_decl (decl);
9654
          }
9655
 
9656
        return expand_expr_real (decl, original_target, tmode,
9657
                                 modifier, alt_rtl);
9658
      }
9659
 
9660
    default:
9661
      return expand_expr_real_2 (&ops, target, tmode, modifier);
9662
    }
9663
}
9664
 
9665
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
9666
   signedness of TYPE), possibly returning the result in TARGET.  */
9667
static rtx
9668
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9669
{
9670
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
9671
  if (target && GET_MODE (target) != GET_MODE (exp))
9672
    target = 0;
9673
  /* For constant values, reduce using build_int_cst_type. */
9674
  if (CONST_INT_P (exp))
9675
    {
9676
      HOST_WIDE_INT value = INTVAL (exp);
9677
      tree t = build_int_cst_type (type, value);
9678
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9679
    }
9680
  else if (TYPE_UNSIGNED (type))
9681
    {
9682
      rtx mask;
9683
      if (prec < HOST_BITS_PER_WIDE_INT)
9684
        mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9685
                                   GET_MODE (exp));
9686
      else
9687
        mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9688
                                   ((unsigned HOST_WIDE_INT) 1
9689
                                    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9690
                                   GET_MODE (exp));
9691
      return expand_and (GET_MODE (exp), exp, mask, target);
9692
    }
9693
  else
9694
    {
9695
      tree count = build_int_cst (NULL_TREE,
9696
                                  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9697
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9698
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9699
    }
9700
}
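/* Editorial illustration, compiled out below (not part of the GCC sources):
   what reduce_to_bit_field_precision computes, written as plain C on a
   HOST_WIDE_INT.  The function names are hypothetical; PREC is the field
   precision and is assumed to be smaller than HOST_BITS_PER_WIDE_INT.  */
#if 0
/* Unsigned case: mask with (1 << PREC) - 1, as the expand_and call does.  */
static unsigned HOST_WIDE_INT
reduce_unsigned_example (unsigned HOST_WIDE_INT val, int prec)
{
  return val & ((((unsigned HOST_WIDE_INT) 1) << prec) - 1);
}

/* Signed case: shift left so bit PREC-1 becomes the sign bit, then
   arithmetic-shift back, mirroring the LSHIFT_EXPR/RSHIFT_EXPR pair.
   For prec == 3, the value 5 (binary 101) comes back as -3.  */
static HOST_WIDE_INT
reduce_signed_example (HOST_WIDE_INT val, int prec)
{
  int count = HOST_BITS_PER_WIDE_INT - prec;
  return (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) val << count) >> count;
}
#endif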
9701
 
9702
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9703
   when applied to the address of EXP produces an address known to be
9704
   aligned more than BIGGEST_ALIGNMENT.  */
9705
 
9706
static int
9707
is_aligning_offset (const_tree offset, const_tree exp)
9708
{
9709
  /* Strip off any conversions.  */
9710
  while (CONVERT_EXPR_P (offset))
9711
    offset = TREE_OPERAND (offset, 0);
9712
 
9713
  /* We must now have a BIT_AND_EXPR with a constant that is one less than
9714
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
9715
  if (TREE_CODE (offset) != BIT_AND_EXPR
9716
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
9717
      || compare_tree_int (TREE_OPERAND (offset, 1),
9718
                           BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9719
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9720
    return 0;
9721
 
9722
  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9723
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
9724
  offset = TREE_OPERAND (offset, 0);
9725
  while (CONVERT_EXPR_P (offset))
9726
    offset = TREE_OPERAND (offset, 0);
9727
 
9728
  if (TREE_CODE (offset) != NEGATE_EXPR)
9729
    return 0;
9730
 
9731
  offset = TREE_OPERAND (offset, 0);
9732
  while (CONVERT_EXPR_P (offset))
9733
    offset = TREE_OPERAND (offset, 0);
9734
 
9735
  /* This must now be the address of EXP.  */
9736
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9737
}
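/* Editorial illustration, compiled out below (not part of the GCC sources):
   the user-level idiom that produces an offset of the shape recognized by
   is_aligning_offset, i.e. a BIT_AND_EXPR of a negated address with a
   constant that is one less than a power of 2.  The names and the OVERALIGN
   constant are hypothetical; OVERALIGN must exceed BIGGEST_ALIGNMENT in
   bytes for the predicate to fire.  */
#if 0
#include <stdint.h>

#define OVERALIGN 256

static char buf[2 * OVERALIGN];

static char *
overaligned_start (void)
{
  /* The added offset is (-&buf) & (OVERALIGN - 1); adding it rounds the
     address up to the next OVERALIGN boundary, so the resulting MEM can be
     marked as aligned to BIGGEST_ALIGNMENT.  */
  uintptr_t misalign = (-(uintptr_t) buf) & (OVERALIGN - 1);
  return buf + misalign;
}
#endif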
9738
 
9739
/* Return the tree node if ARG corresponds to a string constant or zero
9740
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
9741
   in bytes within the string that ARG is accessing.  The type of the
9742
   offset will be `sizetype'.  */
9743
 
9744
tree
9745
string_constant (tree arg, tree *ptr_offset)
9746
{
9747
  tree array, offset, lower_bound;
9748
  STRIP_NOPS (arg);
9749
 
9750
  if (TREE_CODE (arg) == ADDR_EXPR)
9751
    {
9752
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9753
        {
9754
          *ptr_offset = size_zero_node;
9755
          return TREE_OPERAND (arg, 0);
9756
        }
9757
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9758
        {
9759
          array = TREE_OPERAND (arg, 0);
9760
          offset = size_zero_node;
9761
        }
9762
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9763
        {
9764
          array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9765
          offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9766
          if (TREE_CODE (array) != STRING_CST
9767
              && TREE_CODE (array) != VAR_DECL)
9768
            return 0;
9769
 
9770
          /* Check if the array has a nonzero lower bound.  */
9771
          lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9772
          if (!integer_zerop (lower_bound))
9773
            {
9774
              /* If the offset and base aren't both constants, return 0.  */
9775
              if (TREE_CODE (lower_bound) != INTEGER_CST)
9776
                return 0;
9777
              if (TREE_CODE (offset) != INTEGER_CST)
9778
                return 0;
9779
              /* Adjust offset by the lower bound.  */
9780
              offset = size_diffop (fold_convert (sizetype, offset),
9781
                                    fold_convert (sizetype, lower_bound));
9782
            }
9783
        }
9784
      else
9785
        return 0;
9786
    }
9787
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9788
    {
9789
      tree arg0 = TREE_OPERAND (arg, 0);
9790
      tree arg1 = TREE_OPERAND (arg, 1);
9791
 
9792
      STRIP_NOPS (arg0);
9793
      STRIP_NOPS (arg1);
9794
 
9795
      if (TREE_CODE (arg0) == ADDR_EXPR
9796
          && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9797
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9798
        {
9799
          array = TREE_OPERAND (arg0, 0);
9800
          offset = arg1;
9801
        }
9802
      else if (TREE_CODE (arg1) == ADDR_EXPR
9803
               && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9804
                   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9805
        {
9806
          array = TREE_OPERAND (arg1, 0);
9807
          offset = arg0;
9808
        }
9809
      else
9810
        return 0;
9811
    }
9812
  else
9813
    return 0;
9814
 
9815
  if (TREE_CODE (array) == STRING_CST)
9816
    {
9817
      *ptr_offset = fold_convert (sizetype, offset);
9818
      return array;
9819
    }
9820
  else if (TREE_CODE (array) == VAR_DECL)
9821
    {
9822
      int length;
9823
 
9824
      /* Variables initialized to string literals can be handled too.  */
9825
      if (DECL_INITIAL (array) == NULL_TREE
9826
          || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9827
        return 0;
9828
 
9829
      /* If they are read-only, non-volatile and bind locally.  */
9830
      if (! TREE_READONLY (array)
9831
          || TREE_SIDE_EFFECTS (array)
9832
          || ! targetm.binds_local_p (array))
9833
        return 0;
9834
 
9835
      /* Avoid const char foo[4] = "abcde";  */
9836
      if (DECL_SIZE_UNIT (array) == NULL_TREE
9837
          || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9838
          || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9839
          || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9840
        return 0;
9841
 
9842
      /* If variable is bigger than the string literal, OFFSET must be constant
9843
         and inside of the bounds of the string literal.  */
9844
      offset = fold_convert (sizetype, offset);
9845
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9846
          && (! host_integerp (offset, 1)
9847
              || compare_tree_int (offset, length) >= 0))
9848
        return 0;
9849
 
9850
      *ptr_offset = offset;
9851
      return DECL_INITIAL (array);
9852
    }
9853
 
9854
  return 0;
9855
}
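
/* Illustrative sketch, not part of the original file (names are
   hypothetical): for an argument such as

     &"hallo"[2]

   ARG is an ADDR_EXPR of an ARRAY_REF of the STRING_CST "hallo", so the
   STRING_CST is returned and *PTR_OFFSET is set to (sizetype) 2.  A
   variable such as

     static const char greeting[6] = "hallo";

   is handled through DECL_INITIAL, provided it is read-only, binds
   locally and is at least as large as its string initializer.  */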

/* Generate code to calculate OPS, an exploded expression
   using a store-flag instruction and return an rtx for the result.
   OPS reflects a comparison.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (sepops ops, rtx target, enum machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (loc,
                                                code == NE ? NE_EXPR : EQ_EXPR,
                                                arg0, arg1, type),
                          target, VOIDmode, EXPAND_NORMAL);
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
                                operand_mode, unsignedp, 1);
}
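
/* Illustrative sketch, not part of the original file: at the source level
   the single-bit transformation delegated to fold_single_bit_test above
   rewrites

     flag = (x & 0x08) != 0;    as    flag = (x >> 3) & 1;

   and the EQ form additionally XORs the result with 1:

     flag = (x & 0x08) == 0;    as    flag = ((x >> 3) & 1) ^ 1;

   so no scc (store-flag) instruction is needed for such tests.  */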

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
            rtx fallback_label ATTRIBUTE_UNUSED)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
        emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                                 omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_type = lang_hooks.types.type_for_size (index_bits, 0);
          index_expr = fold_convert (index_type, index_expr);
        }

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_normal (minval);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_normal (range);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, !default_label
                                           ? fallback_label : default_label));
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                             default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
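
/* Illustrative sketch, not part of the original file (all names here are
   illustrative): because the lower bound has already been subtracted, the
   single unsigned comparison above performs both bounds checks at once,
   since values below the original minimum wrap around to large unsigned
   numbers.  In GNU C the dispatch roughly corresponds to:

     unsigned long i = (unsigned long) value - (unsigned long) minval;
     if (i > (unsigned long) range)
       goto default_case;
     goto *table[i];

   where the computed goto stands in for the load of the table entry at
   table + i * GET_MODE_SIZE (CASE_VECTOR_MODE) and the tablejump emitted
   above.  */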

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            fold_convert (index_type, index_expr),
                            fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class mclass = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (mclass != MODE_VECTOR_INT
      && mclass != MODE_VECTOR_FLOAT
      && mclass != MODE_VECTOR_FRACT
      && mclass != MODE_VECTOR_UFRACT
      && mclass != MODE_VECTOR_ACCUM
      && mclass != MODE_VECTOR_UACCUM)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else if (TREE_CODE (elt) == FIXED_CST)
        RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}
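
/* Illustrative sketch, not part of the original file: for a GNU C vector
   constant such as

     typedef int v4si __attribute__ ((vector_size (16)));
     static const v4si ones = { 1, 1, 1, 1 };

   the element list of the VECTOR_CST yields a CONST_VECTOR in V4SImode
   whose four elements are (const_int 1), while an all-zero initializer
   is returned directly as CONST0_RTX of the vector mode.  */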

/* Build a decl for an EH personality function named NAME.  */

tree
build_personality_function (const char *name)
{
  tree decl, type;

  type = build_function_type_list (integer_type_node, integer_type_node,
                                   long_long_unsigned_type_node,
                                   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}

/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}

#include "gt-expr.h"
