OpenCores
URL https://opencores.org/ocsvn/openrisc_2011-10-31/openrisc_2011-10-31/trunk

Subversion Repositories openrisc_2011-10-31

[/] [openrisc/] [trunk/] [gnu-src/] [gcc-4.5.1/] [gcc/] [builtins.c] - Blame information for rev 333

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 280 jeremybenn
/* Expand builtin functions.
2
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4
   Free Software Foundation, Inc.
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify it under
9
the terms of the GNU General Public License as published by the Free
10
Software Foundation; either version 3, or (at your option) any later
11
version.
12
 
13
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14
WARRANTY; without even the implied warranty of MERCHANTABILITY or
15
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16
for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
#include "config.h"
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
#include "machmode.h"
27
#include "real.h"
28
#include "rtl.h"
29
#include "tree.h"
30
#include "gimple.h"
31
#include "flags.h"
32
#include "regs.h"
33
#include "hard-reg-set.h"
34
#include "except.h"
35
#include "function.h"
36
#include "insn-config.h"
37
#include "expr.h"
38
#include "optabs.h"
39
#include "libfuncs.h"
40
#include "recog.h"
41
#include "output.h"
42
#include "typeclass.h"
43
#include "toplev.h"
44
#include "predict.h"
45
#include "tm_p.h"
46
#include "target.h"
47
#include "langhooks.h"
48
#include "basic-block.h"
49
#include "tree-mudflap.h"
50
#include "tree-flow.h"
51
#include "value-prof.h"
52
#include "diagnostic.h"
53
 
54
/* Provide default definitions for target macros the target headers may
   not define.  SLOW_UNALIGNED_ACCESS defaults to the target's strict
   alignment setting; PAD_VARARGS_DOWN defaults to padding direction
   implied by byte order.  */
#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
61
/* Folder for one-argument complex (MPC) builtins; declared early because
   it is referenced before its definition.  */
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Expand builtins.def once to build the table of builtin names, indexed
   by enum built_in_function.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];
/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];

/* Forward declarations for the static helpers defined later in this file.
   String-constant access helpers.  */
static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
/* RTL expanders for individual builtins.  */
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_args_info (tree);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, rtx);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
/* Tree-level folders for individual builtins.  */
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (location_t, tree, tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
                                        enum tree_code);
/* Dispatchers folding a call with N fixed arguments (or varargs).  */
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strcat (location_t, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);

/* Helpers for the object-size (_FORTIFY_SOURCE-style _chk) builtins.  */
static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
                                  enum built_in_function);
static bool init_target_chars (void);

/* Target-charset representations of characters and format fragments
   used by the printf/fprintf folders; filled in by init_target_chars.  */
static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
/* MPFR-based constant folders for math builtins.  */
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
227
 
228
/* Return true if NAME starts with __builtin_ or __sync_.  */

bool
is_builtin_name (const char *name)
{
  /* Prefixes that mark a function name as the "internal" name of a
     builtin, together with their lengths.  */
  static const struct { const char *text; size_t len; } prefixes[] = {
    { "__builtin_", 10 },
    { "__sync_", 7 }
  };
  size_t i;

  for (i = 0; i < sizeof prefixes / sizeof prefixes[0]; i++)
    if (strncmp (name, prefixes[i].text, prefixes[i].len) == 0)
      return true;

  return false;
}
239
 
240
 
241
/* Return true if DECL is a function symbol representing a built-in.  */
242
 
243
bool
244
is_builtin_fn (tree decl)
245
{
246
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
247
}
248
 
249
 
250
/* Return true if NODE should be considered for inline expansion regardless
251
   of the optimization level.  This means whenever a function is invoked with
252
   its "internal" name, which normally contains the prefix "__builtin".  */
253
 
254
static bool
255
called_as_built_in (tree node)
256
{
257
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258
     we want the name used to call the function, not the name it
259
     will have. */
260
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
261
  return is_builtin_name (name);
262
}
263
 
264
/* Return the alignment in bits of EXP, an object.
   Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
   guessed alignment e.g. from type alignment.  */

int
get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
{
  /* INNER tracks the alignment guaranteed by the offsets within a
     component reference; it only ever decreases from MAX_ALIGN.  */
  unsigned int inner;

  inner = max_align;
  if (handled_component_p (exp))
   {
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      enum machine_mode mode;
      int unsignedp, volatilep;

      /* Strip the component reference down to the containing object,
         collecting the constant bit position and variable offset.  */
      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                 &mode, &unsignedp, &volatilep, true);
      /* bitpos & -bitpos isolates the lowest set bit: the largest
         power of two dividing the bit offset.  */
      if (bitpos)
        inner = MIN (inner, (unsigned) (bitpos & -bitpos));
      /* Walk a PLUS_EXPR chain of variable offsets, lowering INNER by
         whatever each term can guarantee.  */
      while (offset)
        {
          tree next_offset;

          if (TREE_CODE (offset) == PLUS_EXPR)
            {
              next_offset = TREE_OPERAND (offset, 0);
              offset = TREE_OPERAND (offset, 1);
            }
          else
            next_offset = NULL;
          if (host_integerp (offset, 1))
            {
              /* Any overflow in calculating offset_bits won't change
                 the alignment.  */
              unsigned offset_bits
                = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

              if (offset_bits)
                inner = MIN (inner, (offset_bits & -offset_bits));
            }
          else if (TREE_CODE (offset) == MULT_EXPR
                   && host_integerp (TREE_OPERAND (offset, 1), 1))
            {
              /* Any overflow in calculating offset_factor won't change
                 the alignment.  */
              unsigned offset_factor
                = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
                   * BITS_PER_UNIT);

              if (offset_factor)
                inner = MIN (inner, (offset_factor & -offset_factor));
            }
          else
            {
              /* Unanalyzable offset: only byte alignment is certain.  */
              inner = MIN (inner, BITS_PER_UNIT);
              break;
            }
          offset = next_offset;
        }
    }
  /* A CONST_DECL's alignment comes from its initializer constant.  */
  if (TREE_CODE (exp) == CONST_DECL)
    exp = DECL_INITIAL (exp);
  if (DECL_P (exp)
      && TREE_CODE (exp) != LABEL_DECL)
    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
  else if (CONSTANT_CLASS_P (exp))
    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
           || TREE_CODE (exp) == INDIRECT_REF)
    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
  else
    align = MIN (align, inner);
  return MIN (align, max_align);
}
342
 
343
/* Returns true iff we can trust that alignment information has been
344
   calculated properly.  */
345
 
346
bool
347
can_trust_pointer_alignment (void)
348
{
349
  /* We rely on TER to compute accurate alignment information.  */
350
  return (optimize && flag_tree_ter);
351
}
352
 
353
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  if (!can_trust_pointer_alignment ())
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Start from the alignment of the pointed-to type, clamped to
     MAX_ALIGN.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Peel conversions and pointer arithmetic, refining ALIGN as we go;
     each case either loops with a simpler EXP or returns.  */
  while (1)
    {
      switch (TREE_CODE (exp))
        {
        CASE_CONVERT:
          exp = TREE_OPERAND (exp, 0);
          if (! POINTER_TYPE_P (TREE_TYPE (exp)))
            return align;

          inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
          align = MIN (inner, max_align);
          break;

        case POINTER_PLUS_EXPR:
          /* If sum of pointer + int, restrict our maximum alignment to that
             imposed by the integer.  If not, we can't do any better than
             ALIGN.  */
          if (! host_integerp (TREE_OPERAND (exp, 1), 1))
            return align;

          /* Halve MAX_ALIGN until the constant addend is a multiple of
             it, so the sum's alignment guarantee stays valid.  */
          while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
                  & (max_align / BITS_PER_UNIT - 1))
                 != 0)
            max_align >>= 1;

          exp = TREE_OPERAND (exp, 0);
          break;

        case ADDR_EXPR:
          /* See what we are pointing at and look at its alignment.  */
          return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);

        default:
          return align;
        }
    }
}
412
 
413
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* For a conditional whose arms have the same constant length, that
     length is the answer — provided the condition has no side effects
     we would be discarding (or ONLY_VALUE says we may).  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  /* (e1, e2) has the length of e2 under the same side-effect proviso.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  if (EXPR_HAS_LOCATION (src))
    loc = EXPR_LOCATION (src);
  else
    loc = input_location;

  /* Reduce SRC to a STRING_CST plus an optional byte offset.  */
  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  /* MAX is the array size minus the trailing NUL.  */
  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
     /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
517
 
518
/* Return a char pointer for a C string if it is a string constant
519
   or sum of string constant and integer constant.  */
520
 
521
static const char *
522
c_getstr (tree src)
523
{
524
  tree offset_node;
525
 
526
  src = string_constant (src, &offset_node);
527
  if (src == 0)
528
    return 0;
529
 
530
  if (offset_node == 0)
531
    return TREE_STRING_POINTER (src);
532
  else if (!host_integerp (offset_node, 1)
533
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
534
    return 0;
535
 
536
  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
537
}
538
 
539
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  /* C[0]/C[1] accumulate the low and high halves of the constant.  */
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  /* CH doubles as "haven't seen the NUL yet" flag: once a zero byte is
     read it stays zero, so the remaining bytes pad with zeros.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Map host byte index I to the target bit position J, honoring
         the target's byte and word endianness.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
        ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
571
 
572
/* Cast a target constant CST to target CHAR and if that value fits into
573
   host char type, return zero and put that value into variable pointed to by
574
   P.  */
575
 
576
static int
577
target_char_cast (tree cst, char *p)
578
{
579
  unsigned HOST_WIDE_INT val, hostval;
580
 
581
  if (!host_integerp (cst, 1)
582
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
583
    return 1;
584
 
585
  val = tree_low_cst (cst, 1);
586
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
587
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
588
 
589
  hostval = val;
590
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
591
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
592
 
593
  if (val != hostval)
594
    return 1;
595
 
596
  *p = hostval;
597
  return 0;
598
}
599
 
600
/* Similar to save_expr, but assumes that arbitrary code is not executed
601
   in between the multiple evaluations.  In particular, we assume that a
602
   non-addressable local variable will not be modified.  */
603
 
604
static tree
605
builtin_save_expr (tree exp)
606
{
607
  if (TREE_ADDRESSABLE (exp) == 0
608
      && (TREE_CODE (exp) == PARM_DECL
609
          || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
610
    return exp;
611
 
612
  return save_expr (exp);
613
}
614
 
615
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address is stored one pointer-sized word past
     the frame address.  */
  tem = memory_address (Pmode,
                        plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
698
 
699
/* Alias set used for setjmp buffer.  -1 means "not yet allocated";
   it is created lazily by the setjmp/longjmp expanders below.  */
static alias_set_type setjmp_alias_set = -1;
701
 
702
/* Construct the leading half of a __builtin_setjmp call.  Control will
703
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
704
   exception handling code.  */
705
 
706
void
707
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
708
{
709
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
710
  rtx stack_save;
711
  rtx mem;
712
 
713
  if (setjmp_alias_set == -1)
714
    setjmp_alias_set = new_alias_set ();
715
 
716
  buf_addr = convert_memory_address (Pmode, buf_addr);
717
 
718
  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
719
 
720
  /* We store the frame pointer and the address of receiver_label in
721
     the buffer and use the rest of it for the stack save area, which
722
     is machine-dependent.  */
723
 
724
  mem = gen_rtx_MEM (Pmode, buf_addr);
725
  set_mem_alias_set (mem, setjmp_alias_set);
726
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
727
 
728
  mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
729
  set_mem_alias_set (mem, setjmp_alias_set);
730
 
731
  emit_move_insn (validize_mem (mem),
732
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
733
 
734
  stack_save = gen_rtx_MEM (sa_mode,
735
                            plus_constant (buf_addr,
736
                                           2 * GET_MODE_SIZE (Pmode)));
737
  set_mem_alias_set (stack_save, setjmp_alias_set);
738
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
739
 
740
  /* If there is further processing to do, do it.  */
741
#ifdef HAVE_builtin_setjmp_setup
742
  if (HAVE_builtin_setjmp_setup)
743
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
744
#endif
745
 
746
  /* Tell optimize_save_area_alloca that extra work is going to
747
     need to go on during alloca.  */
748
  cfun->calls_setjmp = 1;
749
 
750
  /* We have a nonlocal label.   */
751
  cfun->has_nonlocal_label = 1;
752
}
753
 
754
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
         apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* Only restore the arg pointer explicitly if no elimination maps
         it to the hard frame pointer (which was restored above).  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

  /* Let the target emit its own receiver code if it has one; fall back
     to the generic nonlocal-goto receiver, or to nothing at all.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
824
 
825
/* __builtin_longjmp is passed a pointer to an array of five words (not
826
   all will be used on all machines).  It operates similarly to the C
827
   library function of the same name, but is more efficient.  Much of
828
   the code below is copied from the handling of non-local gotos.  */
829
 
830
static void
831
expand_builtin_longjmp (rtx buf_addr, rtx value)
832
{
833
  rtx fp, lab, stack, insn, last;
834
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
835
 
836
  /* DRAP is needed for stack realign if longjmp is expanded to current
837
     function  */
838
  if (SUPPORTS_STACK_ALIGNMENT)
839
    crtl->need_drap = true;
840
 
841
  if (setjmp_alias_set == -1)
842
    setjmp_alias_set = new_alias_set ();
843
 
844
  buf_addr = convert_memory_address (Pmode, buf_addr);
845
 
846
  buf_addr = force_reg (Pmode, buf_addr);
847
 
848
  /* We require that the user must pass a second argument of 1, because
849
     that is what builtin_setjmp will return.  */
850
  gcc_assert (value == const1_rtx);
851
 
852
  last = get_last_insn ();
853
#ifdef HAVE_builtin_longjmp
854
  if (HAVE_builtin_longjmp)
855
    emit_insn (gen_builtin_longjmp (buf_addr));
856
  else
857
#endif
858
    {
859
      fp = gen_rtx_MEM (Pmode, buf_addr);
860
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
861
                                               GET_MODE_SIZE (Pmode)));
862
 
863
      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
864
                                                   2 * GET_MODE_SIZE (Pmode)));
865
      set_mem_alias_set (fp, setjmp_alias_set);
866
      set_mem_alias_set (lab, setjmp_alias_set);
867
      set_mem_alias_set (stack, setjmp_alias_set);
868
 
869
      /* Pick up FP, label, and SP from the block and jump.  This code is
870
         from expand_goto in stmt.c; see there for detailed comments.  */
871
#ifdef HAVE_nonlocal_goto
872
      if (HAVE_nonlocal_goto)
873
        /* We have to pass a value to the nonlocal_goto pattern that will
874
           get copied into the static_chain pointer, but it does not matter
875
           what that value is, because builtin_setjmp does not use it.  */
876
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
877
      else
878
#endif
879
        {
880
          lab = copy_to_reg (lab);
881
 
882
          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
883
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
884
 
885
          emit_move_insn (hard_frame_pointer_rtx, fp);
886
          emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
887
 
888
          emit_use (hard_frame_pointer_rtx);
889
          emit_use (stack_pointer_rtx);
890
          emit_indirect_jump (lab);
891
        }
892
    }
893
 
894
  /* Search backwards and mark the jump insn as a non-local goto.
895
     Note that this precludes the use of __builtin_longjmp to a
896
     __builtin_setjmp target in the same function.  However, we've
897
     already cautioned the user that these functions are for
898
     internal exception handling use only.  */
899
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
900
    {
901
      gcc_assert (insn != last);
902
 
903
      if (JUMP_P (insn))
904
        {
905
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
906
          break;
907
        }
908
      else if (CALL_P (insn))
909
        break;
910
    }
911
}
912
 
913
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
914
   and the address of the save area.  */
915
 
916
static rtx
917
expand_builtin_nonlocal_goto (tree exp)
918
{
919
  tree t_label, t_save_area;
920
  rtx r_label, r_save_area, r_fp, r_sp, insn;
921
 
922
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
923
    return NULL_RTX;
924
 
925
  t_label = CALL_EXPR_ARG (exp, 0);
926
  t_save_area = CALL_EXPR_ARG (exp, 1);
927
 
928
  r_label = expand_normal (t_label);
929
  r_label = convert_memory_address (Pmode, r_label);
930
  r_save_area = expand_normal (t_save_area);
931
  r_save_area = convert_memory_address (Pmode, r_save_area);
932
  /* Copy the address of the save location to a register just in case it was based
933
    on the frame pointer.   */
934
  r_save_area = copy_to_reg (r_save_area);
935
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
936
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
937
                      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
938
 
939
  crtl->has_nonlocal_goto = 1;
940
 
941
#ifdef HAVE_nonlocal_goto
942
  /* ??? We no longer need to pass the static chain value, afaik.  */
943
  if (HAVE_nonlocal_goto)
944
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
945
  else
946
#endif
947
    {
948
      r_label = copy_to_reg (r_label);
949
 
950
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
951
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
952
 
953
      /* Restore frame pointer for containing function.
954
         This sets the actual hard register used for the frame pointer
955
         to the location of the function's incoming static chain info.
956
         The non-local goto handler will then adjust it to contain the
957
         proper value and reload the argument pointer, if needed.  */
958
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
959
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
960
 
961
      /* USE of hard_frame_pointer_rtx added for consistency;
962
         not clear if really needed.  */
963
      emit_use (hard_frame_pointer_rtx);
964
      emit_use (stack_pointer_rtx);
965
 
966
      /* If the architecture is using a GP register, we must
967
         conservatively assume that the target function makes use of it.
968
         The prologue of functions with nonlocal gotos must therefore
969
         initialize the GP register to the appropriate value, and we
970
         must then make sure that this value is live at the point
971
         of the jump.  (Note that this doesn't necessarily apply
972
         to targets with a nonlocal_goto pattern; they are free
973
         to implement it in their own way.  Note also that this is
974
         a no-op if the GP register is a global invariant.)  */
975
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
976
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
977
        emit_use (pic_offset_table_rtx);
978
 
979
      emit_indirect_jump (r_label);
980
    }
981
 
982
  /* Search backwards to the jump insn and mark it as a
983
     non-local goto.  */
984
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
985
    {
986
      if (JUMP_P (insn))
987
        {
988
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
989
          break;
990
        }
991
      else if (CALL_P (insn))
992
        break;
993
    }
994
 
995
  return const0_rtx;
996
}
997
 
998
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
999
   (not all will be used on all machines) that was passed to __builtin_setjmp.
1000
   It updates the stack pointer in that block to correspond to the current
1001
   stack pointer.  */
1002
 
1003
static void
1004
expand_builtin_update_setjmp_buf (rtx buf_addr)
1005
{
1006
  enum machine_mode sa_mode = Pmode;
1007
  rtx stack_save;
1008
 
1009
 
1010
#ifdef HAVE_save_stack_nonlocal
1011
  if (HAVE_save_stack_nonlocal)
1012
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1013
#endif
1014
#ifdef STACK_SAVEAREA_MODE
1015
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1016
#endif
1017
 
1018
  stack_save
1019
    = gen_rtx_MEM (sa_mode,
1020
                   memory_address
1021
                   (sa_mode,
1022
                    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1023
 
1024
#ifdef HAVE_setjmp
1025
  if (HAVE_setjmp)
1026
    emit_insn (gen_setjmp ());
1027
#endif
1028
 
1029
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1030
}
1031
 
1032
/* Expand a call to __builtin_prefetch.  For a target that does not support
1033
   data prefetch, evaluate the memory address argument in case it has side
1034
   effects.  */
1035
 
1036
static void
1037
expand_builtin_prefetch (tree exp)
1038
{
1039
  tree arg0, arg1, arg2;
1040
  int nargs;
1041
  rtx op0, op1, op2;
1042
 
1043
  if (!validate_arglist (exp, POINTER_TYPE, 0))
1044
    return;
1045
 
1046
  arg0 = CALL_EXPR_ARG (exp, 0);
1047
 
1048
  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1049
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
1050
     locality).  */
1051
  nargs = call_expr_nargs (exp);
1052
  if (nargs > 1)
1053
    arg1 = CALL_EXPR_ARG (exp, 1);
1054
  else
1055
    arg1 = integer_zero_node;
1056
  if (nargs > 2)
1057
    arg2 = CALL_EXPR_ARG (exp, 2);
1058
  else
1059
    arg2 = build_int_cst (NULL_TREE, 3);
1060
 
1061
  /* Argument 0 is an address.  */
1062
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1063
 
1064
  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
1065
  if (TREE_CODE (arg1) != INTEGER_CST)
1066
    {
1067
      error ("second argument to %<__builtin_prefetch%> must be a constant");
1068
      arg1 = integer_zero_node;
1069
    }
1070
  op1 = expand_normal (arg1);
1071
  /* Argument 1 must be either zero or one.  */
1072
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1073
    {
1074
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1075
               " using zero");
1076
      op1 = const0_rtx;
1077
    }
1078
 
1079
  /* Argument 2 (locality) must be a compile-time constant int.  */
1080
  if (TREE_CODE (arg2) != INTEGER_CST)
1081
    {
1082
      error ("third argument to %<__builtin_prefetch%> must be a constant");
1083
      arg2 = integer_zero_node;
1084
    }
1085
  op2 = expand_normal (arg2);
1086
  /* Argument 2 must be 0, 1, 2, or 3.  */
1087
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1088
    {
1089
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1090
      op2 = const0_rtx;
1091
    }
1092
 
1093
#ifdef HAVE_prefetch
1094
  if (HAVE_prefetch)
1095
    {
1096
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1097
             (op0,
1098
              insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1099
          || (GET_MODE (op0) != Pmode))
1100
        {
1101
          op0 = convert_memory_address (Pmode, op0);
1102
          op0 = force_reg (Pmode, op0);
1103
        }
1104
      emit_insn (gen_prefetch (op0, op1, op2));
1105
    }
1106
#endif
1107
 
1108
  /* Don't do anything with direct references to volatile memory, but
1109
     generate code to handle other side effects.  */
1110
  if (!MEM_P (op0) && side_effects_p (op0))
1111
    emit_insn (op0);
1112
}
1113
 
1114
/* Get a MEM rtx for expression EXP which is the address of an operand
1115
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
1116
   the maximum length of the block of memory that might be accessed or
1117
   NULL if unknown.  */
1118
 
1119
static rtx
1120
get_memory_rtx (tree exp, tree len)
1121
{
1122
  tree orig_exp = exp;
1123
  rtx addr, mem;
1124
  HOST_WIDE_INT off;
1125
 
1126
  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1127
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
1128
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1129
    exp = TREE_OPERAND (exp, 0);
1130
 
1131
  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1132
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1133
 
1134
  /* Get an expression we can use to find the attributes to assign to MEM.
1135
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
1136
     we can.  First remove any nops.  */
1137
  while (CONVERT_EXPR_P (exp)
1138
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1139
    exp = TREE_OPERAND (exp, 0);
1140
 
1141
  off = 0;
1142
  if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1143
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1144
      && host_integerp (TREE_OPERAND (exp, 1), 0)
1145
      && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1146
    exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1147
  else if (TREE_CODE (exp) == ADDR_EXPR)
1148
    exp = TREE_OPERAND (exp, 0);
1149
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1150
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1151
  else
1152
    exp = NULL;
1153
 
1154
  /* Honor attributes derived from exp, except for the alias set
1155
     (as builtin stringops may alias with anything) and the size
1156
     (as stringops may access multiple array elements).  */
1157
  if (exp)
1158
    {
1159
      set_mem_attributes (mem, exp, 0);
1160
 
1161
      if (off)
1162
        mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1163
 
1164
      /* Allow the string and memory builtins to overflow from one
1165
         field into another, see http://gcc.gnu.org/PR23561.
1166
         Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1167
         memory accessed by the string or memory builtin will fit
1168
         within the field.  */
1169
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1170
        {
1171
          tree mem_expr = MEM_EXPR (mem);
1172
          HOST_WIDE_INT offset = -1, length = -1;
1173
          tree inner = exp;
1174
 
1175
          while (TREE_CODE (inner) == ARRAY_REF
1176
                 || CONVERT_EXPR_P (inner)
1177
                 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1178
                 || TREE_CODE (inner) == SAVE_EXPR)
1179
            inner = TREE_OPERAND (inner, 0);
1180
 
1181
          gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1182
 
1183
          if (MEM_OFFSET (mem)
1184
              && CONST_INT_P (MEM_OFFSET (mem)))
1185
            offset = INTVAL (MEM_OFFSET (mem));
1186
 
1187
          if (offset >= 0 && len && host_integerp (len, 0))
1188
            length = tree_low_cst (len, 0);
1189
 
1190
          while (TREE_CODE (inner) == COMPONENT_REF)
1191
            {
1192
              tree field = TREE_OPERAND (inner, 1);
1193
              gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1194
              gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1195
 
1196
              /* Bitfields are generally not byte-addressable.  */
1197
              gcc_assert (!DECL_BIT_FIELD (field)
1198
                          || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1199
                               % BITS_PER_UNIT) == 0
1200
                              && host_integerp (DECL_SIZE (field), 0)
1201
                              && (TREE_INT_CST_LOW (DECL_SIZE (field))
1202
                                  % BITS_PER_UNIT) == 0));
1203
 
1204
              /* If we can prove that the memory starting at XEXP (mem, 0) and
1205
                 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1206
                 can keep the COMPONENT_REF in MEM_EXPR.  But be careful with
1207
                 fields without DECL_SIZE_UNIT like flexible array members.  */
1208
              if (length >= 0
1209
                  && DECL_SIZE_UNIT (field)
1210
                  && host_integerp (DECL_SIZE_UNIT (field), 0))
1211
                {
1212
                  HOST_WIDE_INT size
1213
                    = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1214
                  if (offset <= size
1215
                      && length <= size
1216
                      && offset + length <= size)
1217
                    break;
1218
                }
1219
 
1220
              if (offset >= 0
1221
                  && host_integerp (DECL_FIELD_OFFSET (field), 0))
1222
                offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1223
                          + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1224
                            / BITS_PER_UNIT;
1225
              else
1226
                {
1227
                  offset = -1;
1228
                  length = -1;
1229
                }
1230
 
1231
              mem_expr = TREE_OPERAND (mem_expr, 0);
1232
              inner = TREE_OPERAND (inner, 0);
1233
            }
1234
 
1235
          if (mem_expr == NULL)
1236
            offset = -1;
1237
          if (mem_expr != MEM_EXPR (mem))
1238
            {
1239
              set_mem_expr (mem, mem_expr);
1240
              set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1241
            }
1242
        }
1243
      set_mem_alias_set (mem, 0);
1244
      set_mem_size (mem, NULL_RTX);
1245
    }
1246
 
1247
  return mem;
1248
}
1249
 
1250
/* Built-in functions to perform an untyped call and return.  */
1251
 
1252
/* For each register that may be used for calling a function, this
1253
   gives a mode used to copy the register's value.  VOIDmode indicates
1254
   the register is not used for calling a function.  If the machine
1255
   has register windows, this gives only the outbound registers.
1256
   INCOMING_REGNO gives the corresponding inbound register.  */
1257
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1258
 
1259
/* For each register that may be used for returning values, this gives
1260
   a mode used to copy the register's value.  VOIDmode indicates the
1261
   register is not used for returning values.  If the machine has
1262
   register windows, this gives only the outbound registers.
1263
   INCOMING_REGNO gives the corresponding inbound register.  */
1264
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1265
 
1266
/* Return the size required for the block returned by __builtin_apply_args,
1267
   and initialize apply_args_mode.  */
1268
 
1269
static int
1270
apply_args_size (void)
1271
{
1272
  static int size = -1;
1273
  int align;
1274
  unsigned int regno;
1275
  enum machine_mode mode;
1276
 
1277
  /* The values computed by this function never change.  */
1278
  if (size < 0)
1279
    {
1280
      /* The first value is the incoming arg-pointer.  */
1281
      size = GET_MODE_SIZE (Pmode);
1282
 
1283
      /* The second value is the structure value address unless this is
1284
         passed as an "invisible" first argument.  */
1285
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1286
        size += GET_MODE_SIZE (Pmode);
1287
 
1288
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1289
        if (FUNCTION_ARG_REGNO_P (regno))
1290
          {
1291
            mode = reg_raw_mode[regno];
1292
 
1293
            gcc_assert (mode != VOIDmode);
1294
 
1295
            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1296
            if (size % align != 0)
1297
              size = CEIL (size, align) * align;
1298
            size += GET_MODE_SIZE (mode);
1299
            apply_args_mode[regno] = mode;
1300
          }
1301
        else
1302
          {
1303
            apply_args_mode[regno] = VOIDmode;
1304
          }
1305
    }
1306
  return size;
1307
}
1308
 
1309
/* Return the size required for the block returned by __builtin_apply,
1310
   and initialize apply_result_mode.  */
1311
 
1312
static int
1313
apply_result_size (void)
1314
{
1315
  static int size = -1;
1316
  int align, regno;
1317
  enum machine_mode mode;
1318
 
1319
  /* The values computed by this function never change.  */
1320
  if (size < 0)
1321
    {
1322
      size = 0;
1323
 
1324
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1325
        if (FUNCTION_VALUE_REGNO_P (regno))
1326
          {
1327
            mode = reg_raw_mode[regno];
1328
 
1329
            gcc_assert (mode != VOIDmode);
1330
 
1331
            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1332
            if (size % align != 0)
1333
              size = CEIL (size, align) * align;
1334
            size += GET_MODE_SIZE (mode);
1335
            apply_result_mode[regno] = mode;
1336
          }
1337
        else
1338
          apply_result_mode[regno] = VOIDmode;
1339
 
1340
      /* Allow targets that use untyped_call and untyped_return to override
1341
         the size so that machine-specific information can be stored here.  */
1342
#ifdef APPLY_RESULT_SIZE
1343
      size = APPLY_RESULT_SIZE;
1344
#endif
1345
    }
1346
  return size;
1347
}
1348
 
1349
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  Returns a PARALLEL of SETs, one per live result
   register recorded in apply_result_mode.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        /* Keep SIZE aligned to the mode, mirroring apply_result_size.  */
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1379
 
1380
/* Save the state required to perform an untyped call with the same
1381
   arguments as were passed to the current function.  */
1382
 
1383
static rtx
1384
expand_builtin_apply_args_1 (void)
1385
{
1386
  rtx registers, tem;
1387
  int size, align, regno;
1388
  enum machine_mode mode;
1389
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1390
 
1391
  /* Create a block where the arg-pointer, structure value address,
1392
     and argument registers can be saved.  */
1393
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1394
 
1395
  /* Walk past the arg-pointer and structure value address.  */
1396
  size = GET_MODE_SIZE (Pmode);
1397
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1398
    size += GET_MODE_SIZE (Pmode);
1399
 
1400
  /* Save each register used in calling a function to the block.  */
1401
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1402
    if ((mode = apply_args_mode[regno]) != VOIDmode)
1403
      {
1404
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1405
        if (size % align != 0)
1406
          size = CEIL (size, align) * align;
1407
 
1408
        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1409
 
1410
        emit_move_insn (adjust_address (registers, mode, size), tem);
1411
        size += GET_MODE_SIZE (mode);
1412
      }
1413
 
1414
  /* Save the arg pointer to the block.  */
1415
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
1416
#ifdef STACK_GROWS_DOWNWARD
1417
  /* We need the pointer as the caller actually passed them to us, not
1418
     as we might have pretended they were passed.  Make sure it's a valid
1419
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
1420
  tem
1421
    = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1422
                     NULL_RTX);
1423
#endif
1424
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1425
 
1426
  size = GET_MODE_SIZE (Pmode);
1427
 
1428
  /* Save the structure value address unless this is passed as an
1429
     "invisible" first argument.  */
1430
  if (struct_incoming_value)
1431
    {
1432
      emit_move_insn (adjust_address (registers, Pmode, size),
1433
                      copy_to_reg (struct_incoming_value));
1434
      size += GET_MODE_SIZE (Pmode);
1435
    }
1436
 
1437
  /* Return the address of the block.  */
1438
  return copy_addr_to_reg (XEXP (registers, 0));
1439
}
1440
 
1441
/* __builtin_apply_args returns block of memory allocated on
1442
   the stack into which is stored the arg pointer, structure
1443
   value address, static chain, and all the registers that might
1444
   possibly be used in performing a function call.  The code is
1445
   moved to the start of the function so the incoming values are
1446
   saved.  */
1447
 
1448
static rtx
1449
expand_builtin_apply_args (void)
1450
{
1451
  /* Don't do __builtin_apply_args more than once in a function.
1452
     Save the result of the first call and reuse it.  */
1453
  if (apply_args_value != 0)
1454
    return apply_args_value;
1455
  {
1456
    /* When this function is called, it means that registers must be
1457
       saved on entry to this function.  So we migrate the
1458
       call to the first insn of this function.  */
1459
    rtx temp;
1460
    rtx seq;
1461
 
1462
    start_sequence ();
1463
    temp = expand_builtin_apply_args_1 ();
1464
    seq = get_insns ();
1465
    end_sequence ();
1466
 
1467
    apply_args_value = temp;
1468
 
1469
    /* Put the insns after the NOTE that starts the function.
1470
       If this is inside a start_sequence, make the outer-level insn
1471
       chain current, so the code is placed at the start of the
1472
       function.  If internal_arg_pointer is a non-virtual pseudo,
1473
       it needs to be placed after the function that initializes
1474
       that pseudo.  */
1475
    push_topmost_sequence ();
1476
    if (REG_P (crtl->args.internal_arg_pointer)
1477
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1478
      emit_insn_before (seq, parm_birth_insn);
1479
    else
1480
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1481
    pop_topmost_sequence ();
1482
    return temp;
1483
  }
1484
}
1485
 
1486
/* Perform an untyped call and save the state required to perform an
1487
   untyped return of whatever value was returned by the given function.  */
1488
 
1489
static rtx
1490
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1491
{
1492
  int size, align, regno;
1493
  enum machine_mode mode;
1494
  rtx incoming_args, result, reg, dest, src, call_insn;
1495
  rtx old_stack_level = 0;
1496
  rtx call_fusage = 0;
1497
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1498
 
1499
  arguments = convert_memory_address (Pmode, arguments);
1500
 
1501
  /* Create a block where the return registers can be saved.  */
1502
  result = assign_stack_local (BLKmode, apply_result_size (), -1);
1503
 
1504
  /* Fetch the arg pointer from the ARGUMENTS block.  */
1505
  incoming_args = gen_reg_rtx (Pmode);
1506
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1507
#ifndef STACK_GROWS_DOWNWARD
1508
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1509
                                       incoming_args, 0, OPTAB_LIB_WIDEN);
1510
#endif
1511
 
1512
  /* Push a new argument block and copy the arguments.  Do not allow
1513
     the (potential) memcpy call below to interfere with our stack
1514
     manipulations.  */
1515
  do_pending_stack_adjust ();
1516
  NO_DEFER_POP;
1517
 
1518
  /* Save the stack with nonlocal if available.  */
1519
#ifdef HAVE_save_stack_nonlocal
1520
  if (HAVE_save_stack_nonlocal)
1521
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1522
  else
1523
#endif
1524
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1525
 
1526
  /* Allocate a block of memory onto the stack and copy the memory
1527
     arguments to the outgoing arguments address.  */
1528
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1529
 
1530
  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1531
     may have already set current_function_calls_alloca to true.
1532
     current_function_calls_alloca won't be set if argsize is zero,
1533
     so we have to guarantee need_drap is true here.  */
1534
  if (SUPPORTS_STACK_ALIGNMENT)
1535
    crtl->need_drap = true;
1536
 
1537
  dest = virtual_outgoing_args_rtx;
1538
#ifndef STACK_GROWS_DOWNWARD
1539
  if (CONST_INT_P (argsize))
1540
    dest = plus_constant (dest, -INTVAL (argsize));
1541
  else
1542
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1543
#endif
1544
  dest = gen_rtx_MEM (BLKmode, dest);
1545
  set_mem_align (dest, PARM_BOUNDARY);
1546
  src = gen_rtx_MEM (BLKmode, incoming_args);
1547
  set_mem_align (src, PARM_BOUNDARY);
1548
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1549
 
1550
  /* Refer to the argument block.  */
1551
  apply_args_size ();
1552
  arguments = gen_rtx_MEM (BLKmode, arguments);
1553
  set_mem_align (arguments, PARM_BOUNDARY);
1554
 
1555
  /* Walk past the arg-pointer and structure value address.  */
1556
  size = GET_MODE_SIZE (Pmode);
1557
  if (struct_value)
1558
    size += GET_MODE_SIZE (Pmode);
1559
 
1560
  /* Restore each of the registers previously saved.  Make USE insns
1561
     for each of these registers for use in making the call.  */
1562
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1563
    if ((mode = apply_args_mode[regno]) != VOIDmode)
1564
      {
1565
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1566
        if (size % align != 0)
1567
          size = CEIL (size, align) * align;
1568
        reg = gen_rtx_REG (mode, regno);
1569
        emit_move_insn (reg, adjust_address (arguments, mode, size));
1570
        use_reg (&call_fusage, reg);
1571
        size += GET_MODE_SIZE (mode);
1572
      }
1573
 
1574
  /* Restore the structure value address unless this is passed as an
1575
     "invisible" first argument.  */
1576
  size = GET_MODE_SIZE (Pmode);
1577
  if (struct_value)
1578
    {
1579
      rtx value = gen_reg_rtx (Pmode);
1580
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
1581
      emit_move_insn (struct_value, value);
1582
      if (REG_P (struct_value))
1583
        use_reg (&call_fusage, struct_value);
1584
      size += GET_MODE_SIZE (Pmode);
1585
    }
1586
 
1587
  /* All arguments and registers used for the call are set up by now!  */
1588
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1589
 
1590
  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
1591
     and we don't want to load it into a register as an optimization,
1592
     because prepare_call_address already did it if it should be done.  */
1593
  if (GET_CODE (function) != SYMBOL_REF)
1594
    function = memory_address (FUNCTION_MODE, function);
1595
 
1596
  /* Generate the actual call instruction and save the return value.  */
1597
#ifdef HAVE_untyped_call
1598
  if (HAVE_untyped_call)
1599
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1600
                                      result, result_vector (1, result)));
1601
  else
1602
#endif
1603
#ifdef HAVE_call_value
1604
  if (HAVE_call_value)
1605
    {
1606
      rtx valreg = 0;
1607
 
1608
      /* Locate the unique return register.  It is not possible to
1609
         express a call that sets more than one return register using
1610
         call_value; use untyped_call for that.  In fact, untyped_call
1611
         only needs to save the return registers in the given block.  */
1612
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1613
        if ((mode = apply_result_mode[regno]) != VOIDmode)
1614
          {
1615
            gcc_assert (!valreg); /* HAVE_untyped_call required.  */
1616
 
1617
            valreg = gen_rtx_REG (mode, regno);
1618
          }
1619
 
1620
      emit_call_insn (GEN_CALL_VALUE (valreg,
1621
                                      gen_rtx_MEM (FUNCTION_MODE, function),
1622
                                      const0_rtx, NULL_RTX, const0_rtx));
1623
 
1624
      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1625
    }
1626
  else
1627
#endif
1628
    gcc_unreachable ();
1629
 
1630
  /* Find the CALL insn we just emitted, and attach the register usage
1631
     information.  */
1632
  call_insn = last_call_insn ();
1633
  add_function_usage_to (call_insn, call_fusage);
1634
 
1635
  /* Restore the stack.  */
1636
#ifdef HAVE_save_stack_nonlocal
1637
  if (HAVE_save_stack_nonlocal)
1638
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1639
  else
1640
#endif
1641
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1642
 
1643
  OK_DEFER_POP;
1644
 
1645
  /* Return the address of the result block.  */
1646
  result = copy_addr_to_reg (XEXP (result, 0));
1647
  return convert_memory_address (ptr_mode, result);
1648
}
1649
 
1650
/* Perform an untyped return.  */
1651
 
1652
static void
1653
expand_builtin_return (rtx result)
1654
{
1655
  int size, align, regno;
1656
  enum machine_mode mode;
1657
  rtx reg;
1658
  rtx call_fusage = 0;
1659
 
1660
  result = convert_memory_address (Pmode, result);
1661
 
1662
  apply_result_size ();
1663
  result = gen_rtx_MEM (BLKmode, result);
1664
 
1665
#ifdef HAVE_untyped_return
1666
  if (HAVE_untyped_return)
1667
    {
1668
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1669
      emit_barrier ();
1670
      return;
1671
    }
1672
#endif
1673
 
1674
  /* Restore the return value and note that each value is used.  */
1675
  size = 0;
1676
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1677
    if ((mode = apply_result_mode[regno]) != VOIDmode)
1678
      {
1679
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1680
        if (size % align != 0)
1681
          size = CEIL (size, align) * align;
1682
        reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1683
        emit_move_insn (reg, adjust_address (result, mode, size));
1684
 
1685
        push_to_sequence (call_fusage);
1686
        emit_use (reg);
1687
        call_fusage = get_insns ();
1688
        end_sequence ();
1689
        size += GET_MODE_SIZE (mode);
1690
      }
1691
 
1692
  /* Put the USE insns before the return.  */
1693
  emit_insn (call_fusage);
1694
 
1695
  /* Return whatever values was restored by jumping directly to the end
1696
     of the function.  */
1697
  expand_naked_return ();
1698
}
1699
 
1700
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */
1701
 
1702
static enum type_class
1703
type_to_class (tree type)
1704
{
1705
  switch (TREE_CODE (type))
1706
    {
1707
    case VOID_TYPE:        return void_type_class;
1708
    case INTEGER_TYPE:     return integer_type_class;
1709
    case ENUMERAL_TYPE:    return enumeral_type_class;
1710
    case BOOLEAN_TYPE:     return boolean_type_class;
1711
    case POINTER_TYPE:     return pointer_type_class;
1712
    case REFERENCE_TYPE:   return reference_type_class;
1713
    case OFFSET_TYPE:      return offset_type_class;
1714
    case REAL_TYPE:        return real_type_class;
1715
    case COMPLEX_TYPE:     return complex_type_class;
1716
    case FUNCTION_TYPE:    return function_type_class;
1717
    case METHOD_TYPE:      return method_type_class;
1718
    case RECORD_TYPE:      return record_type_class;
1719
    case UNION_TYPE:
1720
    case QUAL_UNION_TYPE:  return union_type_class;
1721
    case ARRAY_TYPE:       return (TYPE_STRING_FLAG (type)
1722
                                   ? string_type_class : array_type_class);
1723
    case LANG_TYPE:        return lang_type_class;
1724
    default:               return no_type_class;
1725
    }
1726
}
1727
 
1728
/* Expand a call EXP to __builtin_classify_type.  */
1729
 
1730
static rtx
1731
expand_builtin_classify_type (tree exp)
1732
{
1733
  if (call_expr_nargs (exp))
1734
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1735
  return GEN_INT (no_type_class);
1736
}
1737
 
1738
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;

/* Return mathematic function equivalent to FN but operating directly
1753
   on TYPE, if available.  If IMPLICIT is true find the function in
1754
   implicit_built_in_decls[], otherwise use built_in_decls[].  If we
1755
   can't do the conversion, return zero.  */
1756
 
1757
static tree
1758
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1759
{
1760
  tree const *const fn_arr
1761
    = implicit ? implicit_built_in_decls : built_in_decls;
1762
  enum built_in_function fcode, fcodef, fcodel;
1763
 
1764
  switch (fn)
1765
    {
1766
      CASE_MATHFN (BUILT_IN_ACOS)
1767
      CASE_MATHFN (BUILT_IN_ACOSH)
1768
      CASE_MATHFN (BUILT_IN_ASIN)
1769
      CASE_MATHFN (BUILT_IN_ASINH)
1770
      CASE_MATHFN (BUILT_IN_ATAN)
1771
      CASE_MATHFN (BUILT_IN_ATAN2)
1772
      CASE_MATHFN (BUILT_IN_ATANH)
1773
      CASE_MATHFN (BUILT_IN_CBRT)
1774
      CASE_MATHFN (BUILT_IN_CEIL)
1775
      CASE_MATHFN (BUILT_IN_CEXPI)
1776
      CASE_MATHFN (BUILT_IN_COPYSIGN)
1777
      CASE_MATHFN (BUILT_IN_COS)
1778
      CASE_MATHFN (BUILT_IN_COSH)
1779
      CASE_MATHFN (BUILT_IN_DREM)
1780
      CASE_MATHFN (BUILT_IN_ERF)
1781
      CASE_MATHFN (BUILT_IN_ERFC)
1782
      CASE_MATHFN (BUILT_IN_EXP)
1783
      CASE_MATHFN (BUILT_IN_EXP10)
1784
      CASE_MATHFN (BUILT_IN_EXP2)
1785
      CASE_MATHFN (BUILT_IN_EXPM1)
1786
      CASE_MATHFN (BUILT_IN_FABS)
1787
      CASE_MATHFN (BUILT_IN_FDIM)
1788
      CASE_MATHFN (BUILT_IN_FLOOR)
1789
      CASE_MATHFN (BUILT_IN_FMA)
1790
      CASE_MATHFN (BUILT_IN_FMAX)
1791
      CASE_MATHFN (BUILT_IN_FMIN)
1792
      CASE_MATHFN (BUILT_IN_FMOD)
1793
      CASE_MATHFN (BUILT_IN_FREXP)
1794
      CASE_MATHFN (BUILT_IN_GAMMA)
1795
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1796
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
1797
      CASE_MATHFN (BUILT_IN_HYPOT)
1798
      CASE_MATHFN (BUILT_IN_ILOGB)
1799
      CASE_MATHFN (BUILT_IN_INF)
1800
      CASE_MATHFN (BUILT_IN_ISINF)
1801
      CASE_MATHFN (BUILT_IN_J0)
1802
      CASE_MATHFN (BUILT_IN_J1)
1803
      CASE_MATHFN (BUILT_IN_JN)
1804
      CASE_MATHFN (BUILT_IN_LCEIL)
1805
      CASE_MATHFN (BUILT_IN_LDEXP)
1806
      CASE_MATHFN (BUILT_IN_LFLOOR)
1807
      CASE_MATHFN (BUILT_IN_LGAMMA)
1808
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1809
      CASE_MATHFN (BUILT_IN_LLCEIL)
1810
      CASE_MATHFN (BUILT_IN_LLFLOOR)
1811
      CASE_MATHFN (BUILT_IN_LLRINT)
1812
      CASE_MATHFN (BUILT_IN_LLROUND)
1813
      CASE_MATHFN (BUILT_IN_LOG)
1814
      CASE_MATHFN (BUILT_IN_LOG10)
1815
      CASE_MATHFN (BUILT_IN_LOG1P)
1816
      CASE_MATHFN (BUILT_IN_LOG2)
1817
      CASE_MATHFN (BUILT_IN_LOGB)
1818
      CASE_MATHFN (BUILT_IN_LRINT)
1819
      CASE_MATHFN (BUILT_IN_LROUND)
1820
      CASE_MATHFN (BUILT_IN_MODF)
1821
      CASE_MATHFN (BUILT_IN_NAN)
1822
      CASE_MATHFN (BUILT_IN_NANS)
1823
      CASE_MATHFN (BUILT_IN_NEARBYINT)
1824
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
1825
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1826
      CASE_MATHFN (BUILT_IN_POW)
1827
      CASE_MATHFN (BUILT_IN_POWI)
1828
      CASE_MATHFN (BUILT_IN_POW10)
1829
      CASE_MATHFN (BUILT_IN_REMAINDER)
1830
      CASE_MATHFN (BUILT_IN_REMQUO)
1831
      CASE_MATHFN (BUILT_IN_RINT)
1832
      CASE_MATHFN (BUILT_IN_ROUND)
1833
      CASE_MATHFN (BUILT_IN_SCALB)
1834
      CASE_MATHFN (BUILT_IN_SCALBLN)
1835
      CASE_MATHFN (BUILT_IN_SCALBN)
1836
      CASE_MATHFN (BUILT_IN_SIGNBIT)
1837
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1838
      CASE_MATHFN (BUILT_IN_SIN)
1839
      CASE_MATHFN (BUILT_IN_SINCOS)
1840
      CASE_MATHFN (BUILT_IN_SINH)
1841
      CASE_MATHFN (BUILT_IN_SQRT)
1842
      CASE_MATHFN (BUILT_IN_TAN)
1843
      CASE_MATHFN (BUILT_IN_TANH)
1844
      CASE_MATHFN (BUILT_IN_TGAMMA)
1845
      CASE_MATHFN (BUILT_IN_TRUNC)
1846
      CASE_MATHFN (BUILT_IN_Y0)
1847
      CASE_MATHFN (BUILT_IN_Y1)
1848
      CASE_MATHFN (BUILT_IN_YN)
1849
 
1850
      default:
1851
        return NULL_TREE;
1852
      }
1853
 
1854
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
1855
    return fn_arr[fcode];
1856
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1857
    return fn_arr[fcodef];
1858
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1859
    return fn_arr[fcodel];
1860
  else
1861
    return NULL_TREE;
1862
}
1863
 
1864
/* Like mathfn_built_in_1(), but always use the implicit array.  */
1865
 
1866
tree
1867
mathfn_built_in (tree type, enum built_in_function fn)
1868
{
1869
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1870
}
1871
 
1872
/* If errno must be maintained, expand the RTL to check if the result,
1873
   TARGET, of a built-in function call, EXP, is NaN, and if so set
1874
   errno to EDOM.  */
1875
 
1876
static void
1877
expand_errno_check (tree exp, rtx target)
1878
{
1879
  rtx lab = gen_label_rtx ();
1880
 
1881
  /* Test the result; if it is NaN, set errno=EDOM because
1882
     the argument was not in the domain.  */
1883
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1884
                           NULL_RTX, NULL_RTX, lab,
1885
                           /* The jump is very likely.  */
1886
                           REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1887
 
1888
#ifdef TARGET_EDOM
1889
  /* If this built-in doesn't throw an exception, set errno directly.  */
1890
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1891
    {
1892
#ifdef GEN_ERRNO_RTX
1893
      rtx errno_rtx = GEN_ERRNO_RTX;
1894
#else
1895
      rtx errno_rtx
1896
          = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1897
#endif
1898
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1899
      emit_label (lab);
1900
      return;
1901
    }
1902
#endif
1903
 
1904
  /* Make sure the library call isn't expanded as a tail call.  */
1905
  CALL_EXPR_TAILCALL (exp) = 0;
1906
 
1907
  /* We can't set errno=EDOM directly; let the library call do it.
1908
     Pop the arguments right away in case the call gets deleted.  */
1909
  NO_DEFER_POP;
1910
  expand_call (exp, target, 0);
1911
  OK_DEFER_POP;
1912
  emit_label (lab);
1913
}
1914
 
1915
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1916
   Return NULL_RTX if a normal call should be emitted rather than expanding
1917
   the function in-line.  EXP is the expression that is a call to the builtin
1918
   function; if convenient, the result should be placed in TARGET.
1919
   SUBTARGET may be used as the target for computing one of EXP's operands.  */
1920
 
1921
static rtx
1922
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1923
{
1924
  optab builtin_optab;
1925
  rtx op0, insns;
1926
  tree fndecl = get_callee_fndecl (exp);
1927
  enum machine_mode mode;
1928
  bool errno_set = false;
1929
  tree arg;
1930
 
1931
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1932
    return NULL_RTX;
1933
 
1934
  arg = CALL_EXPR_ARG (exp, 0);
1935
 
1936
  switch (DECL_FUNCTION_CODE (fndecl))
1937
    {
1938
    CASE_FLT_FN (BUILT_IN_SQRT):
1939
      errno_set = ! tree_expr_nonnegative_p (arg);
1940
      builtin_optab = sqrt_optab;
1941
      break;
1942
    CASE_FLT_FN (BUILT_IN_EXP):
1943
      errno_set = true; builtin_optab = exp_optab; break;
1944
    CASE_FLT_FN (BUILT_IN_EXP10):
1945
    CASE_FLT_FN (BUILT_IN_POW10):
1946
      errno_set = true; builtin_optab = exp10_optab; break;
1947
    CASE_FLT_FN (BUILT_IN_EXP2):
1948
      errno_set = true; builtin_optab = exp2_optab; break;
1949
    CASE_FLT_FN (BUILT_IN_EXPM1):
1950
      errno_set = true; builtin_optab = expm1_optab; break;
1951
    CASE_FLT_FN (BUILT_IN_LOGB):
1952
      errno_set = true; builtin_optab = logb_optab; break;
1953
    CASE_FLT_FN (BUILT_IN_LOG):
1954
      errno_set = true; builtin_optab = log_optab; break;
1955
    CASE_FLT_FN (BUILT_IN_LOG10):
1956
      errno_set = true; builtin_optab = log10_optab; break;
1957
    CASE_FLT_FN (BUILT_IN_LOG2):
1958
      errno_set = true; builtin_optab = log2_optab; break;
1959
    CASE_FLT_FN (BUILT_IN_LOG1P):
1960
      errno_set = true; builtin_optab = log1p_optab; break;
1961
    CASE_FLT_FN (BUILT_IN_ASIN):
1962
      builtin_optab = asin_optab; break;
1963
    CASE_FLT_FN (BUILT_IN_ACOS):
1964
      builtin_optab = acos_optab; break;
1965
    CASE_FLT_FN (BUILT_IN_TAN):
1966
      builtin_optab = tan_optab; break;
1967
    CASE_FLT_FN (BUILT_IN_ATAN):
1968
      builtin_optab = atan_optab; break;
1969
    CASE_FLT_FN (BUILT_IN_FLOOR):
1970
      builtin_optab = floor_optab; break;
1971
    CASE_FLT_FN (BUILT_IN_CEIL):
1972
      builtin_optab = ceil_optab; break;
1973
    CASE_FLT_FN (BUILT_IN_TRUNC):
1974
      builtin_optab = btrunc_optab; break;
1975
    CASE_FLT_FN (BUILT_IN_ROUND):
1976
      builtin_optab = round_optab; break;
1977
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
1978
      builtin_optab = nearbyint_optab;
1979
      if (flag_trapping_math)
1980
        break;
1981
      /* Else fallthrough and expand as rint.  */
1982
    CASE_FLT_FN (BUILT_IN_RINT):
1983
      builtin_optab = rint_optab; break;
1984
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1985
      builtin_optab = significand_optab; break;
1986
    default:
1987
      gcc_unreachable ();
1988
    }
1989
 
1990
  /* Make a suitable register to place result in.  */
1991
  mode = TYPE_MODE (TREE_TYPE (exp));
1992
 
1993
  if (! flag_errno_math || ! HONOR_NANS (mode))
1994
    errno_set = false;
1995
 
1996
  /* Before working hard, check whether the instruction is available.  */
1997
  if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1998
    {
1999
      target = gen_reg_rtx (mode);
2000
 
2001
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2002
         need to expand the argument again.  This way, we will not perform
2003
         side-effects more the once.  */
2004
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2005
 
2006
      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2007
 
2008
      start_sequence ();
2009
 
2010
      /* Compute into TARGET.
2011
         Set TARGET to wherever the result comes back.  */
2012
      target = expand_unop (mode, builtin_optab, op0, target, 0);
2013
 
2014
      if (target != 0)
2015
        {
2016
          if (errno_set)
2017
            expand_errno_check (exp, target);
2018
 
2019
          /* Output the entire sequence.  */
2020
          insns = get_insns ();
2021
          end_sequence ();
2022
          emit_insn (insns);
2023
          return target;
2024
        }
2025
 
2026
      /* If we were unable to expand via the builtin, stop the sequence
2027
         (without outputting the insns) and call to the library function
2028
         with the stabilized argument list.  */
2029
      end_sequence ();
2030
    }
2031
 
2032
  return expand_call (exp, target, target == const0_rtx);
2033
}
2034
 
2035
/* Expand a call to the builtin binary math functions (pow and atan2).
2036
   Return NULL_RTX if a normal call should be emitted rather than expanding the
2037
   function in-line.  EXP is the expression that is a call to the builtin
2038
   function; if convenient, the result should be placed in TARGET.
2039
   SUBTARGET may be used as the target for computing one of EXP's
2040
   operands.  */
2041
 
2042
static rtx
2043
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2044
{
2045
  optab builtin_optab;
2046
  rtx op0, op1, insns;
2047
  int op1_type = REAL_TYPE;
2048
  tree fndecl = get_callee_fndecl (exp);
2049
  tree arg0, arg1;
2050
  enum machine_mode mode;
2051
  bool errno_set = true;
2052
 
2053
  switch (DECL_FUNCTION_CODE (fndecl))
2054
    {
2055
    CASE_FLT_FN (BUILT_IN_SCALBN):
2056
    CASE_FLT_FN (BUILT_IN_SCALBLN):
2057
    CASE_FLT_FN (BUILT_IN_LDEXP):
2058
      op1_type = INTEGER_TYPE;
2059
    default:
2060
      break;
2061
    }
2062
 
2063
  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2064
    return NULL_RTX;
2065
 
2066
  arg0 = CALL_EXPR_ARG (exp, 0);
2067
  arg1 = CALL_EXPR_ARG (exp, 1);
2068
 
2069
  switch (DECL_FUNCTION_CODE (fndecl))
2070
    {
2071
    CASE_FLT_FN (BUILT_IN_POW):
2072
      builtin_optab = pow_optab; break;
2073
    CASE_FLT_FN (BUILT_IN_ATAN2):
2074
      builtin_optab = atan2_optab; break;
2075
    CASE_FLT_FN (BUILT_IN_SCALB):
2076
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2077
        return 0;
2078
      builtin_optab = scalb_optab; break;
2079
    CASE_FLT_FN (BUILT_IN_SCALBN):
2080
    CASE_FLT_FN (BUILT_IN_SCALBLN):
2081
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2082
        return 0;
2083
    /* Fall through... */
2084
    CASE_FLT_FN (BUILT_IN_LDEXP):
2085
      builtin_optab = ldexp_optab; break;
2086
    CASE_FLT_FN (BUILT_IN_FMOD):
2087
      builtin_optab = fmod_optab; break;
2088
    CASE_FLT_FN (BUILT_IN_REMAINDER):
2089
    CASE_FLT_FN (BUILT_IN_DREM):
2090
      builtin_optab = remainder_optab; break;
2091
    default:
2092
      gcc_unreachable ();
2093
    }
2094
 
2095
  /* Make a suitable register to place result in.  */
2096
  mode = TYPE_MODE (TREE_TYPE (exp));
2097
 
2098
  /* Before working hard, check whether the instruction is available.  */
2099
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2100
    return NULL_RTX;
2101
 
2102
  target = gen_reg_rtx (mode);
2103
 
2104
  if (! flag_errno_math || ! HONOR_NANS (mode))
2105
    errno_set = false;
2106
 
2107
  /* Always stabilize the argument list.  */
2108
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2109
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2110
 
2111
  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2112
  op1 = expand_normal (arg1);
2113
 
2114
  start_sequence ();
2115
 
2116
  /* Compute into TARGET.
2117
     Set TARGET to wherever the result comes back.  */
2118
  target = expand_binop (mode, builtin_optab, op0, op1,
2119
                         target, 0, OPTAB_DIRECT);
2120
 
2121
  /* If we were unable to expand via the builtin, stop the sequence
2122
     (without outputting the insns) and call to the library function
2123
     with the stabilized argument list.  */
2124
  if (target == 0)
2125
    {
2126
      end_sequence ();
2127
      return expand_call (exp, target, target == const0_rtx);
2128
    }
2129
 
2130
  if (errno_set)
2131
    expand_errno_check (exp, target);
2132
 
2133
  /* Output the entire sequence.  */
2134
  insns = get_insns ();
2135
  end_sequence ();
2136
  emit_insn (insns);
2137
 
2138
  return target;
2139
}
2140
 
2141
/* Expand a call to the builtin sin and cos math functions.
2142
   Return NULL_RTX if a normal call should be emitted rather than expanding the
2143
   function in-line.  EXP is the expression that is a call to the builtin
2144
   function; if convenient, the result should be placed in TARGET.
2145
   SUBTARGET may be used as the target for computing one of EXP's
2146
   operands.  */
2147
 
2148
static rtx
2149
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2150
{
2151
  optab builtin_optab;
2152
  rtx op0, insns;
2153
  tree fndecl = get_callee_fndecl (exp);
2154
  enum machine_mode mode;
2155
  tree arg;
2156
 
2157
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2158
    return NULL_RTX;
2159
 
2160
  arg = CALL_EXPR_ARG (exp, 0);
2161
 
2162
  switch (DECL_FUNCTION_CODE (fndecl))
2163
    {
2164
    CASE_FLT_FN (BUILT_IN_SIN):
2165
    CASE_FLT_FN (BUILT_IN_COS):
2166
      builtin_optab = sincos_optab; break;
2167
    default:
2168
      gcc_unreachable ();
2169
    }
2170
 
2171
  /* Make a suitable register to place result in.  */
2172
  mode = TYPE_MODE (TREE_TYPE (exp));
2173
 
2174
  /* Check if sincos insn is available, otherwise fallback
2175
     to sin or cos insn.  */
2176
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2177
    switch (DECL_FUNCTION_CODE (fndecl))
2178
      {
2179
      CASE_FLT_FN (BUILT_IN_SIN):
2180
        builtin_optab = sin_optab; break;
2181
      CASE_FLT_FN (BUILT_IN_COS):
2182
        builtin_optab = cos_optab; break;
2183
      default:
2184
        gcc_unreachable ();
2185
      }
2186
 
2187
  /* Before working hard, check whether the instruction is available.  */
2188
  if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2189
    {
2190
      target = gen_reg_rtx (mode);
2191
 
2192
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2193
         need to expand the argument again.  This way, we will not perform
2194
         side-effects more the once.  */
2195
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2196
 
2197
      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2198
 
2199
      start_sequence ();
2200
 
2201
      /* Compute into TARGET.
2202
         Set TARGET to wherever the result comes back.  */
2203
      if (builtin_optab == sincos_optab)
2204
        {
2205
          int result;
2206
 
2207
          switch (DECL_FUNCTION_CODE (fndecl))
2208
            {
2209
            CASE_FLT_FN (BUILT_IN_SIN):
2210
              result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2211
              break;
2212
            CASE_FLT_FN (BUILT_IN_COS):
2213
              result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2214
              break;
2215
            default:
2216
              gcc_unreachable ();
2217
            }
2218
          gcc_assert (result);
2219
        }
2220
      else
2221
        {
2222
          target = expand_unop (mode, builtin_optab, op0, target, 0);
2223
        }
2224
 
2225
      if (target != 0)
2226
        {
2227
          /* Output the entire sequence.  */
2228
          insns = get_insns ();
2229
          end_sequence ();
2230
          emit_insn (insns);
2231
          return target;
2232
        }
2233
 
2234
      /* If we were unable to expand via the builtin, stop the sequence
2235
         (without outputting the insns) and call to the library function
2236
         with the stabilized argument list.  */
2237
      end_sequence ();
2238
    }
2239
 
2240
  target = expand_call (exp, target, target == const0_rtx);
2241
 
2242
  return target;
2243
}
2244
 
2245
/* Given an interclass math builtin decl FNDECL and it's argument ARG
2246
   return an RTL instruction code that implements the functionality.
2247
   If that isn't possible or available return CODE_FOR_nothing.  */
2248
 
2249
static enum insn_code
2250
interclass_mathfn_icode (tree arg, tree fndecl)
2251
{
2252
  bool errno_set = false;
2253
  optab builtin_optab = 0;
2254
  enum machine_mode mode;
2255
 
2256
  switch (DECL_FUNCTION_CODE (fndecl))
2257
    {
2258
    CASE_FLT_FN (BUILT_IN_ILOGB):
2259
      errno_set = true; builtin_optab = ilogb_optab; break;
2260
    CASE_FLT_FN (BUILT_IN_ISINF):
2261
      builtin_optab = isinf_optab; break;
2262
    case BUILT_IN_ISNORMAL:
2263
    case BUILT_IN_ISFINITE:
2264
    CASE_FLT_FN (BUILT_IN_FINITE):
2265
    case BUILT_IN_FINITED32:
2266
    case BUILT_IN_FINITED64:
2267
    case BUILT_IN_FINITED128:
2268
    case BUILT_IN_ISINFD32:
2269
    case BUILT_IN_ISINFD64:
2270
    case BUILT_IN_ISINFD128:
2271
      /* These builtins have no optabs (yet).  */
2272
      break;
2273
    default:
2274
      gcc_unreachable ();
2275
    }
2276
 
2277
  /* There's no easy way to detect the case we need to set EDOM.  */
2278
  if (flag_errno_math && errno_set)
2279
    return CODE_FOR_nothing;
2280
 
2281
  /* Optab mode depends on the mode of the input argument.  */
2282
  mode = TYPE_MODE (TREE_TYPE (arg));
2283
 
2284
  if (builtin_optab)
2285
    return optab_handler (builtin_optab, mode)->insn_code;
2286
  return CODE_FOR_nothing;
2287
}
2288
 
2289
/* Expand a call to one of the builtin math functions that operate on
2290
   floating point argument and output an integer result (ilogb, isinf,
2291
   isnan, etc).
2292
   Return 0 if a normal call should be emitted rather than expanding the
2293
   function in-line.  EXP is the expression that is a call to the builtin
2294
   function; if convenient, the result should be placed in TARGET.
2295
   SUBTARGET may be used as the target for computing one of EXP's operands.  */
2296
 
2297
static rtx
2298
expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2299
{
2300
  enum insn_code icode = CODE_FOR_nothing;
2301
  rtx op0;
2302
  tree fndecl = get_callee_fndecl (exp);
2303
  enum machine_mode mode;
2304
  tree arg;
2305
 
2306
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2307
    return NULL_RTX;
2308
 
2309
  arg = CALL_EXPR_ARG (exp, 0);
2310
  icode = interclass_mathfn_icode (arg, fndecl);
2311
  mode = TYPE_MODE (TREE_TYPE (arg));
2312
 
2313
  if (icode != CODE_FOR_nothing)
2314
    {
2315
      rtx last = get_last_insn ();
2316
      tree orig_arg = arg;
2317
      /* Make a suitable register to place result in.  */
2318
      if (!target
2319
          || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
2320
          || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
2321
         target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2322
 
2323
      gcc_assert (insn_data[icode].operand[0].predicate
2324
                  (target, GET_MODE (target)));
2325
 
2326
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2327
         need to expand the argument again.  This way, we will not perform
2328
         side-effects more the once.  */
2329
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2330
 
2331
      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2332
 
2333
      if (mode != GET_MODE (op0))
2334
        op0 = convert_to_mode (mode, op0, 0);
2335
 
2336
      /* Compute into TARGET.
2337
         Set TARGET to wherever the result comes back.  */
2338
      if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
2339
        return target;
2340
      delete_insns_since (last);
2341
      CALL_EXPR_ARG (exp, 0) = orig_arg;
2342
    }
2343
 
2344
  return NULL_RTX;
2345
}
2346
 
2347
/* Expand a call to the builtin sincos math function.
2348
   Return NULL_RTX if a normal call should be emitted rather than expanding the
2349
   function in-line.  EXP is the expression that is a call to the builtin
2350
   function.  */
2351
 
2352
static rtx
2353
expand_builtin_sincos (tree exp)
2354
{
2355
  rtx op0, op1, op2, target1, target2;
2356
  enum machine_mode mode;
2357
  tree arg, sinp, cosp;
2358
  int result;
2359
  location_t loc = EXPR_LOCATION (exp);
2360
 
2361
  if (!validate_arglist (exp, REAL_TYPE,
2362
                         POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2363
    return NULL_RTX;
2364
 
2365
  arg = CALL_EXPR_ARG (exp, 0);
2366
  sinp = CALL_EXPR_ARG (exp, 1);
2367
  cosp = CALL_EXPR_ARG (exp, 2);
2368
 
2369
  /* Make a suitable register to place result in.  */
2370
  mode = TYPE_MODE (TREE_TYPE (arg));
2371
 
2372
  /* Check if sincos insn is available, otherwise emit the call.  */
2373
  if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2374
    return NULL_RTX;
2375
 
2376
  target1 = gen_reg_rtx (mode);
2377
  target2 = gen_reg_rtx (mode);
2378
 
2379
  op0 = expand_normal (arg);
2380
  op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2381
  op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2382
 
2383
  /* Compute into target1 and target2.
2384
     Set TARGET to wherever the result comes back.  */
2385
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2386
  gcc_assert (result);
2387
 
2388
  /* Move target1 and target2 to the memory locations indicated
2389
     by op1 and op2.  */
2390
  emit_move_insn (op1, target1);
2391
  emit_move_insn (op2, target2);
2392
 
2393
  return const0_rtx;
2394
}
2395
 
2396
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  SUBTARGET may be used as the target
   for computing one of EXP's operands.

   Three strategies are tried in order: the sincos optab, a libcall to
   sincos (when the target is known to provide it), and finally a libcall
   to cexp with the argument lifted to a pure-imaginary complex value.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos variant matching the cexpi variant's precision.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = built_in_decls[BUILT_IN_SINCOSL];
      else
        gcc_unreachable ();

      /* Stack temporaries receive the sin/cos results; their addresses
         are passed to sincos as the two output pointers.  */
      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
                                      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = built_in_decls[BUILT_IN_CEXPL];
      else
        gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
         friendliest fallback if the user calls __builtin_cexpi
         without full target C99 function support.  */
      if (fn == NULL_TREE)
        {
          tree fntype;
          const char *name = NULL;

          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
            name = "cexpf";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
            name = "cexp";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
            name = "cexpl";

          fntype = build_function_type_list (ctype, ctype, NULL_TREE);
          fn = build_fn_decl (name, fntype);
        }

      /* cexpi (x) == cexp (0 + x*i): build the complex argument.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
                          build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
                          target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
                              make_tree (TREE_TYPE (arg), op2),
                              make_tree (TREE_TYPE (arg), op1)),
                      target, VOIDmode, EXPAND_NORMAL);
}
2505
 
2506
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.
   LOC is the source location to attach to the resulting call.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  /* Take the address of FNDECL so build_call_valist emits an
     unfolded indirect call expression.  */
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
2524
 
2525
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the direct float->integer conversion optab, remembering the
     pure floating-point rounding builtin to fall back on.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_LCEIL:
        case BUILT_IN_LLCEIL:
          name = "ceil";
          break;
        case BUILT_IN_LCEILF:
        case BUILT_IN_LLCEILF:
          name = "ceilf";
          break;
        case BUILT_IN_LCEILL:
        case BUILT_IN_LLCEILL:
          name = "ceill";
          break;
        case BUILT_IN_LFLOOR:
        case BUILT_IN_LLFLOOR:
          name = "floor";
          break;
        case BUILT_IN_LFLOORF:
        case BUILT_IN_LLFLOORF:
          name = "floorf";
          break;
        case BUILT_IN_LFLOORL:
        case BUILT_IN_LLFLOORL:
          name = "floorl";
          break;
        default:
          gcc_unreachable ();
        }

      fntype = build_function_type_list (TREE_TYPE (arg),
                                         TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  /* Lower lfloor/lceil (x) to (long)floor/ceil (x).  */
  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2652
 
2653
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math)
    return NULL_RTX;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
     gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  end_sequence ();

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2721
 
2722
/* To evaluate powi(x,n), the floating point value x raised to the
   constant integer exponent n, we use a hybrid algorithm that
   combines the "window method" with look-up tables.  For an
   introduction to exponentiation algorithms and "addition chains",
   see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
   "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
   3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
   Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998.  */

/* Provide a default value for POWI_MAX_MULTS, the maximum number of
   multiplications to inline before calling the system library's pow
   function.  powi(x,n) requires at worst 2*bits(n)-2 multiplications,
   so this default never requires calling pow, powf or powl.  */

#ifndef POWI_MAX_MULTS
#define POWI_MAX_MULTS  (2*HOST_BITS_PER_WIDE_INT-2)
#endif

/* The size of the "optimal power tree" lookup table.  All
   exponents less than this value are simply looked up in the
   powi_table below.  This threshold is also used to size the
   cache of pseudo registers that hold intermediate results.  */
#define POWI_TABLE_SIZE 256

/* The size, in bits, of the window used in the "window method"
   exponentiation algorithm.  This is equivalent to a radix of
   (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
#define POWI_WINDOW_SIZE 3
2750
 
2751
/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states that an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2793
 
2794
 
2795
/* Return the number of multiplications required to calculate
2796
   powi(x,n) where n is less than POWI_TABLE_SIZE.  This is a
2797
   subroutine of powi_cost.  CACHE is an array indicating
2798
   which exponents have already been calculated.  */
2799
 
2800
static int
2801
powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2802
{
2803
  /* If we've already calculated this exponent, then this evaluation
2804
     doesn't require any additional multiplications.  */
2805
  if (cache[n])
2806
    return 0;
2807
 
2808
  cache[n] = true;
2809
  return powi_lookup_cost (n - powi_table[n], cache)
2810
         + powi_lookup_cost (powi_table[n], cache) + 1;
2811
}
2812
 
2813
/* Return the number of multiplications required to calculate
2814
   powi(x,n) for an arbitrary x, given the exponent N.  This
2815
   function needs to be kept in sync with expand_powi below.  */
2816
 
2817
static int
2818
powi_cost (HOST_WIDE_INT n)
2819
{
2820
  bool cache[POWI_TABLE_SIZE];
2821
  unsigned HOST_WIDE_INT digit;
2822
  unsigned HOST_WIDE_INT val;
2823
  int result;
2824
 
2825
  if (n == 0)
2826
    return 0;
2827
 
2828
  /* Ignore the reciprocal when calculating the cost.  */
2829
  val = (n < 0) ? -n : n;
2830
 
2831
  /* Initialize the exponent cache.  */
2832
  memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2833
  cache[1] = true;
2834
 
2835
  result = 0;
2836
 
2837
  while (val >= POWI_TABLE_SIZE)
2838
    {
2839
      if (val & 1)
2840
        {
2841
          digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2842
          result += powi_lookup_cost (digit, cache)
2843
                    + POWI_WINDOW_SIZE + 1;
2844
          val >>= POWI_WINDOW_SIZE;
2845
        }
2846
      else
2847
        {
2848
          val >>= 1;
2849
          result++;
2850
        }
2851
    }
2852
 
2853
  return result + powi_lookup_cost (val, cache);
2854
}
2855
 
2856
/* Recursive subroutine of expand_powi.  This function takes the array,
   CACHE, of already calculated exponents and an exponent N and returns
   an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE.  */

static rtx
expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
{
  unsigned HOST_WIDE_INT digit;
  rtx target, result;
  rtx op0, op1;

  if (n < POWI_TABLE_SIZE)
    {
      /* Small exponent: reuse a cached power if present, otherwise
         split via the optimal power tree (n = j + (n-j)).  */
      if (cache[n])
        return cache[n];

      /* Cache the register before recursing so shared subexpressions
         in the power tree are computed only once.  */
      target = gen_reg_rtx (mode);
      cache[n] = target;

      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
      op1 = expand_powi_1 (mode, powi_table[n], cache);
    }
  else if (n & 1)
    {
      /* Odd large exponent: peel off the low POWI_WINDOW_SIZE bits
         as a table-sized digit (window method).  */
      target = gen_reg_rtx (mode);
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = expand_powi_1 (mode, n - digit, cache);
      op1 = expand_powi_1 (mode, digit, cache);
    }
  else
    {
      /* Even large exponent: x**n = (x**(n/2))**2, a single squaring.  */
      target = gen_reg_rtx (mode);
      op0 = expand_powi_1 (mode, n >> 1, cache);
      op1 = op0;
    }

  result = expand_mult (mode, op0, op1, target, 0);
  if (result != target)
    emit_move_insn (target, result);
  return target;
}
2897
 
2898
/* Expand the RTL to evaluate powi(x,n) in mode MODE.  X is the
2899
   floating point operand in mode MODE, and N is the exponent.  This
2900
   function needs to be kept in sync with powi_cost above.  */
2901
 
2902
static rtx
2903
expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2904
{
2905
  rtx cache[POWI_TABLE_SIZE];
2906
  rtx result;
2907
 
2908
  if (n == 0)
2909
    return CONST1_RTX (mode);
2910
 
2911
  memset (cache, 0, sizeof (cache));
2912
  cache[1] = x;
2913
 
2914
  result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2915
 
2916
  /* If the original exponent was negative, reciprocate the result.  */
2917
  if (n < 0)
2918
    result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2919
                           result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2920
 
2921
  return result;
2922
}
2923
 
2924
/* Expand a call to the pow built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.

   Constant exponents are special-cased: integer exponents expand to a
   multiplication sequence, half-integers to sqrt(x) * x**(n/2), and
   third-integers to cbrt-based sequences; anything else falls back to
   the generic two-operand math expansion.  */

static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  tree fn, narg0;
  tree type = TREE_TYPE (exp);
  REAL_VALUE_TYPE cint, c, c2;
  HOST_WIDE_INT n;
  rtx op, op2;
  enum machine_mode mode = TYPE_MODE (type);

  if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* Non-constant exponent: use the generic binary-mathfn expansion.  */
  if (TREE_CODE (arg1) != REAL_CST
      || TREE_OVERFLOW (arg1))
    return expand_builtin_mathfn_2 (exp, target, subtarget);

  /* Handle constant exponents.  */

  /* For integer valued exponents we can expand to an optimal multiplication
     sequence using expand_powi.  */
  c = TREE_REAL_CST (arg1);
  n = real_to_integer (&c);
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  /* real_identical against the round-tripped value confirms the
     exponent is exactly an integer.  n in [-1,2] is always exact;
     larger n only under -funsafe-math-optimizations and if the
     multiplication count stays within POWI_MAX_MULTS.  */
  if (real_identical (&c, &cint)
      && ((n >= -1 && n <= 2)
          || (flag_unsafe_math_optimizations
              && optimize_insn_for_speed_p ()
              && powi_cost (n) <= POWI_MAX_MULTS)))
    {
      op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
      if (n != 1)
        {
          op = force_reg (mode, op);
          op = expand_powi (op, mode, n);
        }
      return op;
    }

  /* ARG0 may be expanded more than once below; stabilize it.  */
  narg0 = builtin_save_expr (arg0);

  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
  fn = mathfn_built_in (type, BUILT_IN_SQRT);
  if (fn != NULL_TREE)
    {
      /* n below is 2c, so pow (x, c) == sqrt(x) * x**(n/2).  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c2, &cint)
          && ((flag_unsafe_math_optimizations
               && optimize_insn_for_speed_p ()
               && powi_cost (n/2) <= POWI_MAX_MULTS)
              /* Even the c == 0.5 case cannot be done unconditionally
                 when we need to preserve signed zeros, as
                 pow (-0, 0.5) is +0, while sqrt(-0) is -0.  */
              || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
              /* For c == 1.5 we can assume that x * sqrt (x) is always
                 smaller than pow (x, 1.5) if sqrt will not be expanded
                 as a call.  */
              || (n == 3
                  && (optab_handler (sqrt_optab, mode)->insn_code
                      != CODE_FOR_nothing))))
        {
          tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
                                                  narg0);
          /* Use expand_expr in case the newly built call expression
             was folded to a non-call.  */
          op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
          if (n != 1)
            {
              op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
              op2 = force_reg (mode, op2);
              op2 = expand_powi (op2, mode, abs (n / 2));
              op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
                                        0, OPTAB_LIB_WIDEN);
              /* If the original exponent was negative, reciprocate the
                 result.  */
              if (n < 0)
                op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
                                   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
            }
          return op;
        }
    }

  /* Try if the exponent is a third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
  fn = mathfn_built_in (type, BUILT_IN_CBRT);
  if (fn != NULL_TREE
      && flag_unsafe_math_optimizations
      && (tree_expr_nonnegative_p (arg0)
          || !HONOR_NANS (mode)))
    {
      REAL_VALUE_TYPE dconst3;
      real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
      /* Round 3c to an integer n, then verify n/3 reproduces c
         exactly in MODE, i.e. c really is a representable third.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
      real_round (&c2, mode, &c2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
      real_convert (&c2, mode, &c2);
      if (real_identical (&c2, &c)
          && ((optimize_insn_for_speed_p ()
               && powi_cost (n/3) <= POWI_MAX_MULTS)
              || n == 1))
        {
          tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
                                                  narg0);
          op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
          /* cbrt(x)**2 when the residue n%3 is 2.  */
          if (abs (n) % 3 == 2)
            op = expand_simple_binop (mode, MULT, op, op, op,
                                      0, OPTAB_LIB_WIDEN);
          if (n != 1)
            {
              op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
              op2 = force_reg (mode, op2);
              op2 = expand_powi (op2, mode, abs (n / 3));
              op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
                                        0, OPTAB_LIB_WIDEN);
              /* If the original exponent was negative, reciprocate the
                 result.  */
              if (n < 0)
                op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
                                   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
            }
          return op;
        }
    }

  /* Fall back to optab expansion.  */
  return expand_builtin_mathfn_2 (exp, target, subtarget);
}
3069
 
3070
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Handle constant power.  */

  if (TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);

      /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
         Otherwise, check the number of multiplications required.  */
      if ((TREE_INT_CST_HIGH (arg1) == 0
           || TREE_INT_CST_HIGH (arg1) == -1)
          && ((n >= -1 && n <= 2)
              || (optimize_insn_for_speed_p ()
                  && powi_cost (n) <= POWI_MAX_MULTS)))
        {
          op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
          op0 = force_reg (mode, op0);
          return expand_powi (op0, mode, n);
        }
    }

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  /* Convert both operands to the modes the libcall expects.  */
  op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
                                    target, LCT_CONST, mode, 2,
                                    op0, mode, op1, mode2);

  return target;
}
3132
 
3133
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
                       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
        return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
         integer, but there are side-effects in src, evaluate
         src for side-effects, then return len.
         E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
         can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
        {
          expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return expand_expr (len, target, target_mode, EXPAND_NORMAL);
        }

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
        return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.
         Try TARGET_MODE first, then each wider integer mode.  */
      while (insn_mode != VOIDmode)
        {
          icode = optab_handler (strlen_optab, insn_mode)->insn_code;
          if (icode != CODE_FOR_nothing)
            break;

          insn_mode = GET_MODE_WIDER_MODE (insn_mode);
        }
      if (insn_mode == VOIDmode)
        return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
             && REG_P (result)
             && GET_MODE (result) == insn_mode
             && REGNO (result) >= FIRST_PSEUDO_REGISTER))
        result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
         the actual source until we are sure that the expansion will
         not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
         source operand later.  */
      before_strlen = get_last_insn ();

      /* Operand 2 of the strlen pattern is the terminating character,
         always NUL here.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
                                                            char_mode))
        char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
                             char_rtx, GEN_INT (align));
      if (! pat)
        return NULL_RTX;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
      if (pat != src_reg)
        emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in just before the
         strlen insn that consumes it.  */
      if (before_strlen)
        emit_insn_after (pat, before_strlen);
      else
        emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
        target = result;
      else if (target != 0)
        convert_move (target, result, 0);
      else
        target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
3241
 
3242
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3243
   bytes from constant string DATA + OFFSET and return it as target
3244
   constant.  */
3245
 
3246
static rtx
3247
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3248
                         enum machine_mode mode)
3249
{
3250
  const char *str = (const char *) data;
3251
 
3252
  gcc_assert (offset >= 0
3253
              && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3254
                  <= strlen (str) + 1));
3255
 
3256
  return c_readstr (str + offset, mode);
3257
}
3258
 
3259
/* Expand a call EXP to the memcpy builtin.
3260
   Return NULL_RTX if we failed, the caller should emit a normal call,
3261
   otherwise try to get the result in TARGET, if convenient (and in
3262
   mode MODE if that's convenient).  */
3263
 
3264
static rtx
3265
expand_builtin_memcpy (tree exp, rtx target)
3266
{
3267
  if (!validate_arglist (exp,
3268
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3269
    return NULL_RTX;
3270
  else
3271
    {
3272
      tree dest = CALL_EXPR_ARG (exp, 0);
3273
      tree src = CALL_EXPR_ARG (exp, 1);
3274
      tree len = CALL_EXPR_ARG (exp, 2);
3275
      const char *src_str;
3276
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3277
      unsigned int dest_align
3278
        = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3279
      rtx dest_mem, src_mem, dest_addr, len_rtx;
3280
      HOST_WIDE_INT expected_size = -1;
3281
      unsigned int expected_align = 0;
3282
 
3283
      /* If DEST is not a pointer type, call the normal function.  */
3284
      if (dest_align == 0)
3285
        return NULL_RTX;
3286
 
3287
      /* If either SRC is not a pointer type, don't do this
3288
         operation in-line.  */
3289
      if (src_align == 0)
3290
        return NULL_RTX;
3291
 
3292
      if (currently_expanding_gimple_stmt)
3293
        stringop_block_profile (currently_expanding_gimple_stmt,
3294
                                &expected_align, &expected_size);
3295
 
3296
      if (expected_align < dest_align)
3297
        expected_align = dest_align;
3298
      dest_mem = get_memory_rtx (dest, len);
3299
      set_mem_align (dest_mem, dest_align);
3300
      len_rtx = expand_normal (len);
3301
      src_str = c_getstr (src);
3302
 
3303
      /* If SRC is a string constant and block move would be done
3304
         by pieces, we can avoid loading the string from memory
3305
         and only stored the computed constants.  */
3306
      if (src_str
3307
          && CONST_INT_P (len_rtx)
3308
          && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3309
          && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3310
                                  CONST_CAST (char *, src_str),
3311
                                  dest_align, false))
3312
        {
3313
          dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3314
                                      builtin_memcpy_read_str,
3315
                                      CONST_CAST (char *, src_str),
3316
                                      dest_align, false, 0);
3317
          dest_mem = force_operand (XEXP (dest_mem, 0), target);
3318
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
3319
          return dest_mem;
3320
        }
3321
 
3322
      src_mem = get_memory_rtx (src, len);
3323
      set_mem_align (src_mem, src_align);
3324
 
3325
      /* Copy word part most expediently.  */
3326
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3327
                                         CALL_EXPR_TAILCALL (exp)
3328
                                         ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3329
                                         expected_align, expected_size);
3330
 
3331
      if (dest_addr == 0)
3332
        {
3333
          dest_addr = force_operand (XEXP (dest_mem, 0), target);
3334
          dest_addr = convert_memory_address (ptr_mode, dest_addr);
3335
        }
3336
      return dest_addr;
3337
    }
3338
}
3339
 
3340
/* Expand a call EXP to the mempcpy builtin.
3341
   Return NULL_RTX if we failed; the caller should emit a normal call,
3342
   otherwise try to get the result in TARGET, if convenient (and in
3343
   mode MODE if that's convenient).  If ENDP is 0 return the
3344
   destination pointer, if ENDP is 1 return the end pointer ala
3345
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
3346
   stpcpy.  */
3347
 
3348
static rtx
3349
expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3350
{
3351
  if (!validate_arglist (exp,
3352
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3353
    return NULL_RTX;
3354
  else
3355
    {
3356
      tree dest = CALL_EXPR_ARG (exp, 0);
3357
      tree src = CALL_EXPR_ARG (exp, 1);
3358
      tree len = CALL_EXPR_ARG (exp, 2);
3359
      return expand_builtin_mempcpy_args (dest, src, len,
3360
                                          target, mode, /*endp=*/ 1);
3361
    }
3362
}
3363
 
3364
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
3365
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3366
   so that this can also be called without constructing an actual CALL_EXPR.
3367
   The other arguments and return value are the same as for
3368
   expand_builtin_mempcpy.  */
3369
 
3370
static rtx
3371
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3372
                             rtx target, enum machine_mode mode, int endp)
3373
{
3374
    /* If return value is ignored, transform mempcpy into memcpy.  */
3375
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3376
    {
3377
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3378
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3379
                                           dest, src, len);
3380
      return expand_expr (result, target, mode, EXPAND_NORMAL);
3381
    }
3382
  else
3383
    {
3384
      const char *src_str;
3385
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3386
      unsigned int dest_align
3387
        = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3388
      rtx dest_mem, src_mem, len_rtx;
3389
 
3390
      /* If either SRC or DEST is not a pointer type, don't do this
3391
         operation in-line.  */
3392
      if (dest_align == 0 || src_align == 0)
3393
        return NULL_RTX;
3394
 
3395
      /* If LEN is not constant, call the normal function.  */
3396
      if (! host_integerp (len, 1))
3397
        return NULL_RTX;
3398
 
3399
      len_rtx = expand_normal (len);
3400
      src_str = c_getstr (src);
3401
 
3402
      /* If SRC is a string constant and block move would be done
3403
         by pieces, we can avoid loading the string from memory
3404
         and only stored the computed constants.  */
3405
      if (src_str
3406
          && CONST_INT_P (len_rtx)
3407
          && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3408
          && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3409
                                  CONST_CAST (char *, src_str),
3410
                                  dest_align, false))
3411
        {
3412
          dest_mem = get_memory_rtx (dest, len);
3413
          set_mem_align (dest_mem, dest_align);
3414
          dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3415
                                      builtin_memcpy_read_str,
3416
                                      CONST_CAST (char *, src_str),
3417
                                      dest_align, false, endp);
3418
          dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3419
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
3420
          return dest_mem;
3421
        }
3422
 
3423
      if (CONST_INT_P (len_rtx)
3424
          && can_move_by_pieces (INTVAL (len_rtx),
3425
                                 MIN (dest_align, src_align)))
3426
        {
3427
          dest_mem = get_memory_rtx (dest, len);
3428
          set_mem_align (dest_mem, dest_align);
3429
          src_mem = get_memory_rtx (src, len);
3430
          set_mem_align (src_mem, src_align);
3431
          dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3432
                                     MIN (dest_align, src_align), endp);
3433
          dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3434
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
3435
          return dest_mem;
3436
        }
3437
 
3438
      return NULL_RTX;
3439
    }
3440
}
3441
 
3442
#ifndef HAVE_movstr
3443
# define HAVE_movstr 0
3444
# define CODE_FOR_movstr CODE_FOR_nothing
3445
#endif
3446
 
3447
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
3448
   we failed, the caller should emit a normal call, otherwise try to
3449
   get the result in TARGET, if convenient.  If ENDP is 0 return the
3450
   destination pointer, if ENDP is 1 return the end pointer ala
3451
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
3452
   stpcpy.  */
3453
 
3454
static rtx
3455
expand_movstr (tree dest, tree src, rtx target, int endp)
3456
{
3457
  rtx end;
3458
  rtx dest_mem;
3459
  rtx src_mem;
3460
  rtx insn;
3461
  const struct insn_data * data;
3462
 
3463
  if (!HAVE_movstr)
3464
    return NULL_RTX;
3465
 
3466
  dest_mem = get_memory_rtx (dest, NULL);
3467
  src_mem = get_memory_rtx (src, NULL);
3468
  data = insn_data + CODE_FOR_movstr;
3469
  if (!endp)
3470
    {
3471
      target = force_reg (Pmode, XEXP (dest_mem, 0));
3472
      dest_mem = replace_equiv_address (dest_mem, target);
3473
      end = gen_reg_rtx (Pmode);
3474
    }
3475
  else
3476
    {
3477
      if (target == 0
3478
          || target == const0_rtx
3479
          || ! (*data->operand[0].predicate) (target, Pmode))
3480
        {
3481
          end = gen_reg_rtx (Pmode);
3482
          if (target != const0_rtx)
3483
            target = end;
3484
        }
3485
      else
3486
        end = target;
3487
    }
3488
 
3489
  if (data->operand[0].mode != VOIDmode)
3490
    end = gen_lowpart (data->operand[0].mode, end);
3491
 
3492
  insn = data->genfun (end, dest_mem, src_mem);
3493
 
3494
  gcc_assert (insn);
3495
 
3496
  emit_insn (insn);
3497
 
3498
  /* movstr is supposed to set end to the address of the NUL
3499
     terminator.  If the caller requested a mempcpy-like return value,
3500
     adjust it.  */
3501
  if (endp == 1 && target != const0_rtx)
3502
    {
3503
      rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3504
      emit_move_insn (target, force_operand (tem, NULL_RTX));
3505
    }
3506
 
3507
  return target;
3508
}
3509
 
3510
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
3511
   NULL_RTX if we failed the caller should emit a normal call, otherwise
3512
   try to get the result in TARGET, if convenient (and in mode MODE if that's
3513
   convenient).  */
3514
 
3515
static rtx
3516
expand_builtin_strcpy (tree exp, rtx target)
3517
{
3518
  if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3519
   {
3520
     tree dest = CALL_EXPR_ARG (exp, 0);
3521
     tree src = CALL_EXPR_ARG (exp, 1);
3522
     return expand_builtin_strcpy_args (dest, src, target);
3523
   }
3524
   return NULL_RTX;
3525
}
3526
 
3527
/* Helper function to do the actual work for expand_builtin_strcpy.  The
3528
   arguments to the builtin_strcpy call DEST and SRC are broken out
3529
   so that this can also be called without constructing an actual CALL_EXPR.
3530
   The other arguments and return value are the same as for
3531
   expand_builtin_strcpy.  */
3532
 
3533
static rtx
3534
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3535
{
3536
  return expand_movstr (dest, src, target, /*endp=*/0);
3537
}
3538
 
3539
/* Expand a call EXP to the stpcpy builtin.
3540
   Return NULL_RTX if we failed the caller should emit a normal call,
3541
   otherwise try to get the result in TARGET, if convenient (and in
3542
   mode MODE if that's convenient).  */
3543
 
3544
static rtx
3545
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3546
{
3547
  tree dst, src;
3548
  location_t loc = EXPR_LOCATION (exp);
3549
 
3550
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3551
    return NULL_RTX;
3552
 
3553
  dst = CALL_EXPR_ARG (exp, 0);
3554
  src = CALL_EXPR_ARG (exp, 1);
3555
 
3556
  /* If return value is ignored, transform stpcpy into strcpy.  */
3557
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3558
    {
3559
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3560
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3561
      return expand_expr (result, target, mode, EXPAND_NORMAL);
3562
    }
3563
  else
3564
    {
3565
      tree len, lenp1;
3566
      rtx ret;
3567
 
3568
      /* Ensure we get an actual string whose length can be evaluated at
3569
         compile-time, not an expression containing a string.  This is
3570
         because the latter will potentially produce pessimized code
3571
         when used to produce the return value.  */
3572
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3573
        return expand_movstr (dst, src, target, /*endp=*/2);
3574
 
3575
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3576
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3577
                                         target, mode, /*endp=*/2);
3578
 
3579
      if (ret)
3580
        return ret;
3581
 
3582
      if (TREE_CODE (len) == INTEGER_CST)
3583
        {
3584
          rtx len_rtx = expand_normal (len);
3585
 
3586
          if (CONST_INT_P (len_rtx))
3587
            {
3588
              ret = expand_builtin_strcpy_args (dst, src, target);
3589
 
3590
              if (ret)
3591
                {
3592
                  if (! target)
3593
                    {
3594
                      if (mode != VOIDmode)
3595
                        target = gen_reg_rtx (mode);
3596
                      else
3597
                        target = gen_reg_rtx (GET_MODE (ret));
3598
                    }
3599
                  if (GET_MODE (target) != GET_MODE (ret))
3600
                    ret = gen_lowpart (GET_MODE (target), ret);
3601
 
3602
                  ret = plus_constant (ret, INTVAL (len_rtx));
3603
                  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3604
                  gcc_assert (ret);
3605
 
3606
                  return target;
3607
                }
3608
            }
3609
        }
3610
 
3611
      return expand_movstr (dst, src, target, /*endp=*/2);
3612
    }
3613
}
3614
 
3615
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3616
   bytes from constant string DATA + OFFSET and return it as target
3617
   constant.  */
3618
 
3619
rtx
3620
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3621
                          enum machine_mode mode)
3622
{
3623
  const char *str = (const char *) data;
3624
 
3625
  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3626
    return const0_rtx;
3627
 
3628
  return c_readstr (str + offset, mode);
3629
}
3630
 
3631
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
3632
   NULL_RTX if we failed the caller should emit a normal call.  */
3633
 
3634
static rtx
3635
expand_builtin_strncpy (tree exp, rtx target)
3636
{
3637
  location_t loc = EXPR_LOCATION (exp);
3638
 
3639
  if (validate_arglist (exp,
3640
                        POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3641
    {
3642
      tree dest = CALL_EXPR_ARG (exp, 0);
3643
      tree src = CALL_EXPR_ARG (exp, 1);
3644
      tree len = CALL_EXPR_ARG (exp, 2);
3645
      tree slen = c_strlen (src, 1);
3646
 
3647
      /* We must be passed a constant len and src parameter.  */
3648
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3649
        return NULL_RTX;
3650
 
3651
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3652
 
3653
      /* We're required to pad with trailing zeros if the requested
3654
         len is greater than strlen(s2)+1.  In that case try to
3655
         use store_by_pieces, if it fails, punt.  */
3656
      if (tree_int_cst_lt (slen, len))
3657
        {
3658
          unsigned int dest_align
3659
            = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3660
          const char *p = c_getstr (src);
3661
          rtx dest_mem;
3662
 
3663
          if (!p || dest_align == 0 || !host_integerp (len, 1)
3664
              || !can_store_by_pieces (tree_low_cst (len, 1),
3665
                                       builtin_strncpy_read_str,
3666
                                       CONST_CAST (char *, p),
3667
                                       dest_align, false))
3668
            return NULL_RTX;
3669
 
3670
          dest_mem = get_memory_rtx (dest, len);
3671
          store_by_pieces (dest_mem, tree_low_cst (len, 1),
3672
                           builtin_strncpy_read_str,
3673
                           CONST_CAST (char *, p), dest_align, false, 0);
3674
          dest_mem = force_operand (XEXP (dest_mem, 0), target);
3675
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
3676
          return dest_mem;
3677
        }
3678
    }
3679
  return NULL_RTX;
3680
}
3681
 
3682
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3683
   bytes from constant string DATA + OFFSET and return it as target
3684
   constant.  */
3685
 
3686
rtx
3687
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3688
                         enum machine_mode mode)
3689
{
3690
  const char *c = (const char *) data;
3691
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3692
 
3693
  memset (p, *c, GET_MODE_SIZE (mode));
3694
 
3695
  return c_readstr (p, mode);
3696
}
3697
 
3698
/* Callback routine for store_by_pieces.  Return the RTL of a register
3699
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3700
   char value given in the RTL register data.  For example, if mode is
3701
   4 bytes wide, return the RTL for 0x01010101*data.  */
3702
 
3703
static rtx
3704
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3705
                        enum machine_mode mode)
3706
{
3707
  rtx target, coeff;
3708
  size_t size;
3709
  char *p;
3710
 
3711
  size = GET_MODE_SIZE (mode);
3712
  if (size == 1)
3713
    return (rtx) data;
3714
 
3715
  p = XALLOCAVEC (char, size);
3716
  memset (p, 1, size);
3717
  coeff = c_readstr (p, mode);
3718
 
3719
  target = convert_to_mode (mode, (rtx) data, 1);
3720
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3721
  return force_reg (mode, target);
3722
}
3723
 
3724
/* Expand expression EXP, which is a call to the memset builtin.  Return
3725
   NULL_RTX if we failed the caller should emit a normal call, otherwise
3726
   try to get the result in TARGET, if convenient (and in mode MODE if that's
3727
   convenient).  */
3728
 
3729
static rtx
3730
expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3731
{
3732
  if (!validate_arglist (exp,
3733
                         POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3734
    return NULL_RTX;
3735
  else
3736
    {
3737
      tree dest = CALL_EXPR_ARG (exp, 0);
3738
      tree val = CALL_EXPR_ARG (exp, 1);
3739
      tree len = CALL_EXPR_ARG (exp, 2);
3740
      return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3741
    }
3742
}
3743
 
3744
/* Helper function to do the actual work for expand_builtin_memset.  The
3745
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3746
   so that this can also be called without constructing an actual CALL_EXPR.
3747
   The other arguments and return value are the same as for
3748
   expand_builtin_memset.  */
3749
 
3750
static rtx
3751
expand_builtin_memset_args (tree dest, tree val, tree len,
3752
                            rtx target, enum machine_mode mode, tree orig_exp)
3753
{
3754
  tree fndecl, fn;
3755
  enum built_in_function fcode;
3756
  char c;
3757
  unsigned int dest_align;
3758
  rtx dest_mem, dest_addr, len_rtx;
3759
  HOST_WIDE_INT expected_size = -1;
3760
  unsigned int expected_align = 0;
3761
 
3762
  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3763
 
3764
  /* If DEST is not a pointer type, don't do this operation in-line.  */
3765
  if (dest_align == 0)
3766
    return NULL_RTX;
3767
 
3768
  if (currently_expanding_gimple_stmt)
3769
    stringop_block_profile (currently_expanding_gimple_stmt,
3770
                            &expected_align, &expected_size);
3771
 
3772
  if (expected_align < dest_align)
3773
    expected_align = dest_align;
3774
 
3775
  /* If the LEN parameter is zero, return DEST.  */
3776
  if (integer_zerop (len))
3777
    {
3778
      /* Evaluate and ignore VAL in case it has side-effects.  */
3779
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3780
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
3781
    }
3782
 
3783
  /* Stabilize the arguments in case we fail.  */
3784
  dest = builtin_save_expr (dest);
3785
  val = builtin_save_expr (val);
3786
  len = builtin_save_expr (len);
3787
 
3788
  len_rtx = expand_normal (len);
3789
  dest_mem = get_memory_rtx (dest, len);
3790
 
3791
  if (TREE_CODE (val) != INTEGER_CST)
3792
    {
3793
      rtx val_rtx;
3794
 
3795
      val_rtx = expand_normal (val);
3796
      val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3797
                                 val_rtx, 0);
3798
 
3799
      /* Assume that we can memset by pieces if we can store
3800
       * the coefficients by pieces (in the required modes).
3801
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
3802
      c = 1;
3803
      if (host_integerp (len, 1)
3804
          && can_store_by_pieces (tree_low_cst (len, 1),
3805
                                  builtin_memset_read_str, &c, dest_align,
3806
                                  true))
3807
        {
3808
          val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3809
                               val_rtx);
3810
          store_by_pieces (dest_mem, tree_low_cst (len, 1),
3811
                           builtin_memset_gen_str, val_rtx, dest_align,
3812
                           true, 0);
3813
        }
3814
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3815
                                        dest_align, expected_align,
3816
                                        expected_size))
3817
        goto do_libcall;
3818
 
3819
      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3820
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
3821
      return dest_mem;
3822
    }
3823
 
3824
  if (target_char_cast (val, &c))
3825
    goto do_libcall;
3826
 
3827
  if (c)
3828
    {
3829
      if (host_integerp (len, 1)
3830
          && can_store_by_pieces (tree_low_cst (len, 1),
3831
                                  builtin_memset_read_str, &c, dest_align,
3832
                                  true))
3833
        store_by_pieces (dest_mem, tree_low_cst (len, 1),
3834
                         builtin_memset_read_str, &c, dest_align, true, 0);
3835
      else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3836
                                        dest_align, expected_align,
3837
                                        expected_size))
3838
        goto do_libcall;
3839
 
3840
      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3841
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
3842
      return dest_mem;
3843
    }
3844
 
3845
  set_mem_align (dest_mem, dest_align);
3846
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
3847
                                   CALL_EXPR_TAILCALL (orig_exp)
3848
                                   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3849
                                   expected_align, expected_size);
3850
 
3851
  if (dest_addr == 0)
3852
    {
3853
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3854
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
3855
    }
3856
 
3857
  return dest_addr;
3858
 
3859
 do_libcall:
3860
  fndecl = get_callee_fndecl (orig_exp);
3861
  fcode = DECL_FUNCTION_CODE (fndecl);
3862
  if (fcode == BUILT_IN_MEMSET)
3863
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3864
                                dest, val, len);
3865
  else if (fcode == BUILT_IN_BZERO)
3866
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3867
                                dest, len);
3868
  else
3869
    gcc_unreachable ();
3870
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3871
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3872
  return expand_call (fn, target, target == const0_rtx);
3873
}
3874
 
3875
/* Expand expression EXP, which is a call to the bzero builtin.  Return
3876
   NULL_RTX if we failed the caller should emit a normal call.  */
3877
 
3878
static rtx
3879
expand_builtin_bzero (tree exp)
3880
{
3881
  tree dest, size;
3882
  location_t loc = EXPR_LOCATION (exp);
3883
 
3884
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3885
    return NULL_RTX;
3886
 
3887
  dest = CALL_EXPR_ARG (exp, 0);
3888
  size = CALL_EXPR_ARG (exp, 1);
3889
 
3890
  /* New argument list transforming bzero(ptr x, int y) to
3891
     memset(ptr x, int 0, size_t y).   This is done this way
3892
     so that if it isn't expanded inline, we fallback to
3893
     calling bzero instead of memset.  */
3894
 
3895
  return expand_builtin_memset_args (dest, integer_zero_node,
3896
                                     fold_convert_loc (loc, sizetype, size),
3897
                                     const0_rtx, VOIDmode, exp);
3898
}
3899
 
3900
/* Expand expression EXP, which is a call to the memcmp built-in function.
3901
   Return NULL_RTX if we failed and the
3902
   caller should emit a normal call, otherwise try to get the result in
3903
   TARGET, if convenient (and in mode MODE, if that's convenient).  */
3904
 
3905
static rtx
3906
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3907
                       ATTRIBUTE_UNUSED enum machine_mode mode)
3908
{
3909
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3910
 
3911
  if (!validate_arglist (exp,
3912
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3913
    return NULL_RTX;
3914
 
3915
#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
3916
  {
3917
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
3918
    rtx result;
3919
    rtx insn;
3920
    tree arg1 = CALL_EXPR_ARG (exp, 0);
3921
    tree arg2 = CALL_EXPR_ARG (exp, 1);
3922
    tree len = CALL_EXPR_ARG (exp, 2);
3923
 
3924
    int arg1_align
3925
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3926
    int arg2_align
3927
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3928
    enum machine_mode insn_mode;
3929
 
3930
#ifdef HAVE_cmpmemsi
3931
    if (HAVE_cmpmemsi)
3932
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3933
    else
3934
#endif
3935
#ifdef HAVE_cmpstrnsi
3936
    if (HAVE_cmpstrnsi)
3937
      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3938
    else
3939
#endif
3940
      return NULL_RTX;
3941
 
3942
    /* If we don't have POINTER_TYPE, call the function.  */
3943
    if (arg1_align == 0 || arg2_align == 0)
3944
      return NULL_RTX;
3945
 
3946
    /* Make a place to write the result of the instruction.  */
3947
    result = target;
3948
    if (! (result != 0
3949
           && REG_P (result) && GET_MODE (result) == insn_mode
3950
           && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3951
      result = gen_reg_rtx (insn_mode);
3952
 
3953
    arg1_rtx = get_memory_rtx (arg1, len);
3954
    arg2_rtx = get_memory_rtx (arg2, len);
3955
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3956
 
3957
    /* Set MEM_SIZE as appropriate.  */
3958
    if (CONST_INT_P (arg3_rtx))
3959
      {
3960
        set_mem_size (arg1_rtx, arg3_rtx);
3961
        set_mem_size (arg2_rtx, arg3_rtx);
3962
      }
3963
 
3964
#ifdef HAVE_cmpmemsi
3965
    if (HAVE_cmpmemsi)
3966
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3967
                           GEN_INT (MIN (arg1_align, arg2_align)));
3968
    else
3969
#endif
3970
#ifdef HAVE_cmpstrnsi
3971
    if (HAVE_cmpstrnsi)
3972
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3973
                            GEN_INT (MIN (arg1_align, arg2_align)));
3974
    else
3975
#endif
3976
      gcc_unreachable ();
3977
 
3978
    if (insn)
3979
      emit_insn (insn);
3980
    else
3981
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3982
                               TYPE_MODE (integer_type_node), 3,
3983
                               XEXP (arg1_rtx, 0), Pmode,
3984
                               XEXP (arg2_rtx, 0), Pmode,
3985
                               convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3986
                                                TYPE_UNSIGNED (sizetype)),
3987
                               TYPE_MODE (sizetype));
3988
 
3989
    /* Return the value in the proper mode for this function.  */
3990
    mode = TYPE_MODE (TREE_TYPE (exp));
3991
    if (GET_MODE (result) == mode)
3992
      return result;
3993
    else if (target != 0)
3994
      {
3995
        convert_move (target, result, 0);
3996
        return target;
3997
      }
3998
    else
3999
      return convert_to_mode (mode, result, 0);
4000
  }
4001
#endif
4002
 
4003
  return NULL_RTX;
4004
}
4005
 
4006
/* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
4007
   if we failed the caller should emit a normal call, otherwise try to get
4008
   the result in TARGET, if convenient.  */
4009
 
4010
static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

/* Only attempt inline expansion when the target provides a string-compare
   insn pattern (cmpstrsi or cmpstrnsi).  */
#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (cmpstr_optab[SImode] != CODE_FOR_nothing
      || cmpstrn_optab[SImode] != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      /* Known pointer alignments in bytes.  */
      int arg1_align
        = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
        = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
        return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
        {
          enum machine_mode insn_mode
            = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

          /* Make a place to write the result of the instruction.
             Reuse TARGET only if it is already a pseudo register of
             the mode the insn produces.  */
          result = target;
          if (! (result != 0
                 && REG_P (result) && GET_MODE (result) == insn_mode
                 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
            result = gen_reg_rtx (insn_mode);

          insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
                               GEN_INT (MIN (arg1_align, arg2_align)));
        }
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
        {
          tree len;
          rtx arg3_rtx;

          enum machine_mode insn_mode
            = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
          tree len1 = c_strlen (arg1, 1);
          tree len2 = c_strlen (arg2, 1);

          /* +1 for the terminating NUL, so the compare covers it too.  */
          if (len1)
            len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
          if (len2)
            len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

          /* If we don't have a constant length for the first, use the length
             of the second, if we know it.  We don't require a constant for
             this case; some cost analysis could be done if both are available
             but neither is constant.  For now, assume they're equally cheap,
             unless one has side effects.  If both strings have constant lengths,
             use the smaller.  */

          if (!len1)
            len = len2;
          else if (!len2)
            len = len1;
          else if (TREE_SIDE_EFFECTS (len1))
            len = len2;
          else if (TREE_SIDE_EFFECTS (len2))
            len = len1;
          else if (TREE_CODE (len1) != INTEGER_CST)
            len = len2;
          else if (TREE_CODE (len2) != INTEGER_CST)
            len = len1;
          else if (tree_int_cst_lt (len1, len2))
            len = len1;
          else
            len = len2;

          /* If both arguments have side effects, we cannot optimize.  */
          if (!len || TREE_SIDE_EFFECTS (len))
            goto do_libcall;

          arg3_rtx = expand_normal (len);

          /* Make a place to write the result of the instruction.  */
          result = target;
          if (! (result != 0
                 && REG_P (result) && GET_MODE (result) == insn_mode
                 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
            result = gen_reg_rtx (insn_mode);

          insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
                                GEN_INT (MIN (arg1_align, arg2_align)));
        }
#endif

      if (insn)
        {
          enum machine_mode mode;
          emit_insn (insn);

          /* Return the value in the proper mode for this function.  */
          mode = TYPE_MODE (TREE_TYPE (exp));
          if (GET_MODE (result) == mode)
            return result;
          if (target == 0)
            return convert_to_mode (mode, result, 0);
          convert_move (target, result, 0);
          return target;
        }

      /* Expand the library call ourselves using a stabilized argument
         list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4148
 
4149
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise, try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
                        ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
  {
    tree len, len1, len2;
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result, insn;
    tree fndecl, fn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree arg3 = CALL_EXPR_ARG (exp, 2);

    /* Known pointer alignments in bytes.  */
    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode
      = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

    len1 = c_strlen (arg1, 1);
    len2 = c_strlen (arg2, 1);

    /* +1 for the terminating NUL, so the compare covers it too.  */
    if (len1)
      len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
    if (len2)
      len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

    /* If we don't have a constant length for the first, use the length
       of the second, if we know it.  We don't require a constant for
       this case; some cost analysis could be done if both are available
       but neither is constant.  For now, assume they're equally cheap,
       unless one has side effects.  If both strings have constant lengths,
       use the smaller.  */

    if (!len1)
      len = len2;
    else if (!len2)
      len = len1;
    else if (TREE_SIDE_EFFECTS (len1))
      len = len2;
    else if (TREE_SIDE_EFFECTS (len2))
      len = len1;
    else if (TREE_CODE (len1) != INTEGER_CST)
      len = len2;
    else if (TREE_CODE (len2) != INTEGER_CST)
      len = len1;
    else if (tree_int_cst_lt (len1, len2))
      len = len1;
    else
      len = len2;

    /* If both arguments have side effects, we cannot optimize.  */
    if (!len || TREE_SIDE_EFFECTS (len))
      return NULL_RTX;

    /* The actual new length parameter is MIN(len,arg3).  */
    len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
                       fold_convert_loc (loc, TREE_TYPE (len), arg3));

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.
       Reuse TARGET only if it is already a pseudo register of the
       mode the insn produces.  */
    result = target;
    if (! (result != 0
           && REG_P (result) && GET_MODE (result) == insn_mode
           && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
    arg1 = builtin_save_expr (arg1);
    arg2 = builtin_save_expr (arg2);
    len = builtin_save_expr (len);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (len);
    insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
                          GEN_INT (MIN (arg1_align, arg2_align)));
    if (insn)
      {
        emit_insn (insn);

        /* Return the value in the proper mode for this function.  */
        mode = TYPE_MODE (TREE_TYPE (exp));
        if (GET_MODE (result) == mode)
          return result;
        if (target == 0)
          return convert_to_mode (mode, result, 0);
        convert_move (target, result, 0);
        return target;
      }

    /* Expand the library call ourselves using a stabilized argument
       list to avoid re-evaluating the function's arguments twice.  */
    fndecl = get_callee_fndecl (exp);
    fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
                                arg1, arg2, len);
    gcc_assert (TREE_CODE (fn) == CALL_EXPR);
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
    return expand_call (fn, target, target == const0_rtx);
  }
#endif
  return NULL_RTX;
}
4271
 
4272
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  The register-save insns are emitted once, at
   the start of the function; repeated calls reuse the cached value.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4308
 
4309
/* __builtin_args_info (N) returns word N of the arg space info
4310
   for the current function.  The number and meanings of words
4311
   is controlled by the definition of CUMULATIVE_ARGS.  */
4312
 
4313
static rtx
4314
expand_builtin_args_info (tree exp)
4315
{
4316
  int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4317
  int *word_ptr = (int *) &crtl->args.info;
4318
 
4319
  gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4320
 
4321
  if (call_expr_nargs (exp) != 0)
4322
    {
4323
      if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4324
        error ("argument of %<__builtin_args_info%> must be constant");
4325
      else
4326
        {
4327
          HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4328
 
4329
          if (wordnum < 0 || wordnum >= nwords)
4330
            error ("argument of %<__builtin_args_info%> out of range");
4331
          else
4332
            return GEN_INT (word_ptr[wordnum]);
4333
        }
4334
    }
4335
  else
4336
    error ("missing argument in %<__builtin_args_info%>");
4337
 
4338
  return const0_rtx;
4339
}
4340
 
4341
/* Expand a call to __builtin_next_arg.  */
4342
 
4343
static rtx
4344
expand_builtin_next_arg (void)
4345
{
4346
  /* Checking arguments is already done in fold_builtin_next_arg
4347
     that must be called before this function.  */
4348
  return expand_binop (ptr_mode, add_optab,
4349
                       crtl->args.internal_arg_pointer,
4350
                       crtl->args.arg_offset_rtx,
4351
                       NULL_RTX, 0, OPTAB_LIB_WIDEN);
4352
}
4353
 
4354
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  LOC is the location to use for any trees
   built here.  If NEEDS_LVALUE is nonzero the caller will store into
   the result, so it must remain addressable.  Returns the (possibly
   SAVE_EXPR-wrapped) va_list expression to use.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* Callers must have validated the type already.  */
  gcc_assert (vatype != NULL_TREE);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
         vatype, but it's possible we've actually been given an array
         (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
         So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
        {
          tree p1 = build_pointer_type (TREE_TYPE (vatype));
          valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
        }
    }
  else
    {
      tree pt;

      if (! needs_lvalue)
        {
          if (! TREE_SIDE_EFFECTS (valist))
            return valist;

          /* Take the address so the (side-effecting) lvalue is
             computed only once, then re-dereference below.  */
          pt = build_pointer_type (vatype);
          valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
          TREE_SIDE_EFFECTS (valist) = 1;
        }

      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);
      valist = build_fold_indirect_ref_loc (loc, valist);
    }

  return valist;
}
4400
 
4401
/* The "standard" definition of va_list is a plain void pointer.
   Default for TARGET_BUILD_BUILTIN_VA_LIST.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
4408
 
4409
/* The "standard" abi va_list is va_list_type_node.  FNDECL is ignored
   since the standard ABI uses the same va_list type for all functions.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
4416
 
4417
/* The "standard" type of va_list is va_list_type_node.  */
4418
 
4419
tree
4420
std_canonical_va_list_type (tree type)
4421
{
4422
  tree wtype, htype;
4423
 
4424
  if (INDIRECT_REF_P (type))
4425
    type = TREE_TYPE (type);
4426
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4427
    type = TREE_TYPE (type);
4428
  wtype = va_list_type_node;
4429
  htype = type;
4430
  /* Treat structure va_list types.  */
4431
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4432
    htype = TREE_TYPE (htype);
4433
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
4434
    {
4435
      /* If va_list is an array type, the argument may have decayed
4436
         to a pointer type, e.g. by being passed to another function.
4437
         In that case, unwrap both types so that we can compare the
4438
         underlying records.  */
4439
      if (TREE_CODE (htype) == ARRAY_TYPE
4440
          || POINTER_TYPE_P (htype))
4441
        {
4442
          wtype = TREE_TYPE (wtype);
4443
          htype = TREE_TYPE (htype);
4444
        }
4445
    }
4446
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4447
    return va_list_type_node;
4448
 
4449
  return NULL_TREE;
4450
}
4451
 
4452
/* The "standard" implementation of va_start: just assign `nextarg' to
4453
   the variable.  */
4454
 
4455
void
4456
std_expand_builtin_va_start (tree valist, rtx nextarg)
4457
{
4458
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4459
  convert_move (va_r, nextarg, 0);
4460
}
4461
 
4462
/* Expand EXP, a call to __builtin_va_start.  Always returns const0_rtx;
   diagnostics are emitted for malformed calls.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses a bad second argument; a nonzero
     return means an error was already reported.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Prefer the target hook; fall back to the standard expansion.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
4490
 
4491
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.

   VALIST is the va_list expression and TYPE the type being fetched.
   Gimplified setup statements are appended to *PRE_P (and *POST_P via
   gimplify_expr).  Returns a tree that dereferences the computed
   argument address.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
                          gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference are fetched as a pointer and
     dereferenced at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = valist_tmp + (boundary - 1)  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
                  fold_build2 (POINTER_PLUS_EXPR,
                               TREE_TYPE (valist),
                               valist_tmp, size_int (boundary - 1)));
      gimplify_and_add (t, pre_p);

      /* valist_tmp = valist_tmp & -boundary  (round down to BOUNDARY)  */
      t = fold_convert (sizetype, valist_tmp);
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
                  fold_convert (TREE_TYPE (valist),
                                fold_build2 (BIT_AND_EXPR, sizetype, t,
                                             size_int (-boundary))));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
                       rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
                       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build2 (POINTER_PLUS_EXPR,
                          TREE_TYPE (addr), addr, t);
    }

  /* Compute new value for AP.  */
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4591
 
4592
/* Build an indirect-ref expression over the given TREE, which represents a
4593
   piece of a va_arg() expansion.  */
4594
tree
4595
build_va_arg_indirect_ref (tree addr)
4596
{
4597
  addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4598
 
4599
  if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF.  */
4600
    mf_mark (addr);
4601
 
4602
  return addr;
4603
}
4604
 
4605
/* Return a dummy expression of type TYPE in order to keep going after an
4606
   error.  */
4607
 
4608
static tree
4609
dummy_object (tree type)
4610
{
4611
  tree t = build_int_cst (build_pointer_type (type), 0);
4612
  return build1 (INDIRECT_REF, type, t);
4613
}
4614
 
4615
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   *EXPR_P is the VA_ARG_EXPR; gimplified statements go to *PRE_P and
   *POST_P.  Returns GS_ERROR on a bad va_list type, GS_ALL_DONE when
   the expression has been fully resolved here, or GS_OK when the
   target hook produced a replacement expression.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
           != type)
    {
      /* Emit the "so you should pass" note only once per compilation.  */
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
         violation, so we cannot make this an error.  If this call is never
         executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
                           "%qT is promoted to %qT when passed through %<...%>",
                           type, promoted_type);
      if (!gave_help && warned)
        {
          gave_help = true;
          inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
                  promoted_type, type);
        }

      /* We can, however, treat "undefined" any way we please.
         Call abort to encourage the user to fix the program.  */
      if (warned)
        inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
         expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
                               implicit_built_in_decls[BUILT_IN_TRAP], 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
         mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
         from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
        {
          /* For this case, the backends will be expecting a pointer to
             TREE_TYPE (abi), but it's possible we've
             actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
             So fix it.  */
          if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
            {
              tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
              valist = fold_convert_loc (loc, p1,
                                         build_fold_addr_expr_loc (loc, valist));
            }

          gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
        }
      else
        gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
        /* FIXME: Once most targets are converted we should merely
           assert this is non-null.  */
        return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4707
 
4708
/* Expand EXP, a call to __builtin_va_end.  */
4709
 
4710
static rtx
4711
expand_builtin_va_end (tree exp)
4712
{
4713
  tree valist = CALL_EXPR_ARG (exp, 0);
4714
 
4715
  /* Evaluate for side effects, if needed.  I hate macros that don't
4716
     do that.  */
4717
  if (TREE_SIDE_EFFECTS (valist))
4718
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4719
 
4720
  return const0_rtx;
4721
}
4722
 
4723
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  Always returns const0_rtx.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* The destination is written (needs an lvalue); the source is only
     read.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar or record va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array va_list: copy the underlying storage as a block.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
                  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
4774
 
4775
/* Expand a call to one of the builtin functions __builtin_frame_address or
4776
   __builtin_return_address.  */
4777
 
4778
static rtx
4779
expand_builtin_frame_address (tree fndecl, tree exp)
4780
{
4781
  /* The argument must be a nonnegative integer constant.
4782
     It counts the number of frames to scan up the stack.
4783
     The value is the return address saved in that frame.  */
4784
  if (call_expr_nargs (exp) == 0)
4785
    /* Warning about missing arg was already issued.  */
4786
    return const0_rtx;
4787
  else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4788
    {
4789
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4790
        error ("invalid argument to %<__builtin_frame_address%>");
4791
      else
4792
        error ("invalid argument to %<__builtin_return_address%>");
4793
      return const0_rtx;
4794
    }
4795
  else
4796
    {
4797
      rtx tem
4798
        = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4799
                                      tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4800
 
4801
      /* Some ports cannot access arbitrary stack frames.  */
4802
      if (tem == NULL)
4803
        {
4804
          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4805
            warning (0, "unsupported argument to %<__builtin_frame_address%>");
4806
          else
4807
            warning (0, "unsupported argument to %<__builtin_return_address%>");
4808
          return const0_rtx;
4809
        }
4810
 
4811
      /* For __builtin_frame_address, return what we've got.  */
4812
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4813
        return tem;
4814
 
4815
      if (!REG_P (tem)
4816
          && ! CONSTANT_P (tem))
4817
        tem = copy_to_mode_reg (Pmode, tem);
4818
      return tem;
4819
    }
4820
}
4821
 
4822
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if
4823
   we failed and the caller should emit a normal call, otherwise try to get
4824
   the result in TARGET, if convenient.  */
4825
 
4826
static rtx
4827
expand_builtin_alloca (tree exp, rtx target)
4828
{
4829
  rtx op0;
4830
  rtx result;
4831
 
4832
  /* Emit normal call if marked not-inlineable.  */
4833
  if (CALL_CANNOT_INLINE_P (exp))
4834
    return NULL_RTX;
4835
 
4836
  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4837
    return NULL_RTX;
4838
 
4839
  /* Compute the argument.  */
4840
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4841
 
4842
  /* Allocate the desired space.  */
4843
  result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4844
  result = convert_memory_address (ptr_mode, result);
4845
 
4846
  return result;
4847
}
4848
 
4849
/* Expand a call to a bswap builtin with argument ARG0.  MODE
4850
   is the mode to expand with.  */
4851
 
4852
static rtx
4853
expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4854
{
4855
  enum machine_mode mode;
4856
  tree arg;
4857
  rtx op0;
4858
 
4859
  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4860
    return NULL_RTX;
4861
 
4862
  arg = CALL_EXPR_ARG (exp, 0);
4863
  mode = TYPE_MODE (TREE_TYPE (arg));
4864
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4865
 
4866
  target = expand_unop (mode, bswap_optab, op0, target, 1);
4867
 
4868
  gcc_assert (target);
4869
 
4870
  return convert_to_mode (mode, target, 0);
4871
}
4872
 
4873
/* Expand a call to a unary builtin in EXP.
4874
   Return NULL_RTX if a normal call should be emitted rather than expanding the
4875
   function in-line.  If convenient, the result should be placed in TARGET.
4876
   SUBTARGET may be used as the target for computing one of EXP's operands.  */
4877
 
4878
static rtx
4879
expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4880
                     rtx subtarget, optab op_optab)
4881
{
4882
  rtx op0;
4883
 
4884
  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4885
    return NULL_RTX;
4886
 
4887
  /* Compute the argument.  */
4888
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4889
                     VOIDmode, EXPAND_NORMAL);
4890
  /* Compute op, into TARGET if possible.
4891
     Set TARGET to wherever the result comes back.  */
4892
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4893
                        op_optab, op0, target, 1);
4894
  gcc_assert (target);
4895
 
4896
  return convert_to_mode (target_mode, target, 0);
4897
}
4898
 
4899
/* Expand a call to __builtin_expect.  We just return our argument
4900
   as the builtin_expect semantic should've been already executed by
4901
   tree branch prediction pass. */
4902
 
4903
static rtx
4904
expand_builtin_expect (tree exp, rtx target)
4905
{
4906
  tree arg;
4907
 
4908
  if (call_expr_nargs (exp) < 2)
4909
    return const0_rtx;
4910
  arg = CALL_EXPR_ARG (exp, 0);
4911
 
4912
  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4913
  /* When guessing was done, the hints should be already stripped away.  */
4914
  gcc_assert (!flag_guess_branch_prob
4915
              || optimize == 0 || errorcount || sorrycount);
4916
  return target;
4917
}
4918
 
4919
/* Expand a call to __builtin_trap.  Use the target's trap insn if it
   has one, otherwise call abort.  A barrier is emitted afterwards
   since control never continues past the trap.  */
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
4930
 
4931
/* Expand a call to __builtin_unreachable.  Nothing is generated except
   a barrier marking that control flow does not continue here; it is
   the compiled program's obligation never to actually reach this
   point.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4941
 
4942
/* Expand EXP, a call to fabs, fabsf or fabsl.
4943
   Return NULL_RTX if a normal call should be emitted rather than expanding
4944
   the function inline.  If convenient, the result should be placed
4945
   in TARGET.  SUBTARGET may be used as the target for computing
4946
   the operand.  */
4947
 
4948
static rtx
4949
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4950
{
4951
  enum machine_mode mode;
4952
  tree arg;
4953
  rtx op0;
4954
 
4955
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4956
    return NULL_RTX;
4957
 
4958
  arg = CALL_EXPR_ARG (exp, 0);
4959
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4960
  mode = TYPE_MODE (TREE_TYPE (arg));
4961
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4962
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4963
}
4964
 
4965
/* Expand EXP, a call to copysign, copysignf, or copysignl.
4966
   Return NULL is a normal call should be emitted rather than expanding the
4967
   function inline.  If convenient, the result should be placed in TARGET.
4968
   SUBTARGET may be used as the target for computing the operand.  */
4969
 
4970
static rtx
4971
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4972
{
4973
  rtx op0, op1;
4974
  tree arg;
4975
 
4976
  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4977
    return NULL_RTX;
4978
 
4979
  arg = CALL_EXPR_ARG (exp, 0);
4980
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4981
 
4982
  arg = CALL_EXPR_ARG (exp, 1);
4983
  op1 = expand_normal (arg);
4984
 
4985
  return expand_copysign (op0, op1, target);
4986
}
4987
 
4988
/* Create a new constant string literal and return a char* pointer to it.
4989
   The STRING_CST value is the LEN characters at STR.  */
4990
tree
4991
build_string_literal (int len, const char *str)
4992
{
4993
  tree t, elem, index, type;
4994
 
4995
  t = build_string (len, str);
4996
  elem = build_type_variant (char_type_node, 1, 0);
4997
  index = build_index_type (size_int (len - 1));
4998
  type = build_array_type (elem, index);
4999
  TREE_TYPE (t) = type;
5000
  TREE_CONSTANT (t) = 1;
5001
  TREE_READONLY (t) = 1;
5002
  TREE_STATIC (t) = 1;
5003
 
5004
  type = build_pointer_type (elem);
5005
  t = build1 (ADDR_EXPR, type,
5006
              build4 (ARRAY_REF, elem,
5007
                      t, integer_zero_node, NULL_TREE, NULL_TREE));
5008
  return t;
5009
}
5010
 
5011
/* Expand a call to either the entry or exit function profiler.  */
5012
 
5013
static rtx
5014
expand_builtin_profile_func (bool exitp)
5015
{
5016
  rtx this_rtx, which;
5017
 
5018
  this_rtx = DECL_RTL (current_function_decl);
5019
  gcc_assert (MEM_P (this_rtx));
5020
  this_rtx = XEXP (this_rtx, 0);
5021
 
5022
  if (exitp)
5023
    which = profile_function_exit_libfunc;
5024
  else
5025
    which = profile_function_entry_libfunc;
5026
 
5027
  emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5028
                     expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5029
                                                 0),
5030
                     Pmode);
5031
 
5032
  return const0_rtx;
5033
}
5034
 
5035
/* Expand a call to __builtin___clear_cache.  Three cases, decided at
   compile time by the target configuration:
     - no "clear_cache" insn but CLEAR_INSN_CACHE defined: fall back to
       a library call (return NULL_RTX);
     - no insn and no CLEAR_INSN_CACHE: the libgcc routine is a no-op,
       so do nothing (return const0_rtx);
     - a "clear_cache" insn exists: emit it directly.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;
  enum insn_code icode;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      icode = CODE_FOR_clear_cache;

      /* Expand both pointer arguments in Pmode and force them into
         operands acceptable to the insn's predicates.  */
      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
      begin_rtx = convert_memory_address (Pmode, begin_rtx);
      if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
        begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
      end_rtx = convert_memory_address (Pmode, end_rtx);
      if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
        end_rtx = copy_to_mode_reg (Pmode, end_rtx);

      emit_insn (gen_clear_cache (begin_rtx, end_rtx));
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
5087
 
5088
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */
5089
 
5090
static rtx
5091
round_trampoline_addr (rtx tramp)
5092
{
5093
  rtx temp, addend, mask;
5094
 
5095
  /* If we don't need too much alignment, we'll have been guaranteed
5096
     proper alignment by get_trampoline_type.  */
5097
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5098
    return tramp;
5099
 
5100
  /* Round address up to desired boundary.  */
5101
  temp = gen_reg_rtx (Pmode);
5102
  addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5103
  mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5104
 
5105
  temp  = expand_simple_binop (Pmode, PLUS, tramp, addend,
5106
                               temp, 0, OPTAB_LIB_WIDEN);
5107
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
5108
                               temp, 0, OPTAB_LIB_WIDEN);
5109
 
5110
  return tramp;
5111
}
5112
 
5113
/* Expand a call to __builtin_init_trampoline (TRAMP, FUNC, CHAIN):
   write a trampoline for nested function FUNC with static chain CHAIN
   into the memory at TRAMP, via the target hook.  Returns const0_rtx,
   or NULL_RTX if the arguments do not validate.  */

static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
                         POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  /* Build a BLKmode MEM over the trampoline storage; it must never be
     considered trapping.  */
  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* The TRAMP argument should be the address of a field within the
     local function's FRAME decl.  Let's see if we can fill in the
     MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
                                     true, 0);

  /* If rounding changed the address, rebuild the MEM with the stricter
     alignment and exact trampoline size.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  trampolines_created = 1;
  return const0_rtx;
}
5160
 
5161
static rtx
5162
expand_builtin_adjust_trampoline (tree exp)
5163
{
5164
  rtx tramp;
5165
 
5166
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5167
    return NULL_RTX;
5168
 
5169
  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5170
  tramp = round_trampoline_addr (tramp);
5171
  if (targetm.calls.trampoline_adjust_address)
5172
    tramp = targetm.calls.trampoline_adjust_address (tramp);
5173
 
5174
  return tramp;
5175
}
5176
 
5177
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  HOST_WIDE_INT hi, lo;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* FMODE is the float argument's mode, RMODE the integer result's.  */
  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = signbit_optab->handlers [(int) fmode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      rtx last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
        return target;
      /* The insn attempt failed; discard any partial RTL it left.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
  {
    /* But we can't do this if the format supports signed zero.  */
    if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
      return NULL_RTX;

    arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
                       build_real (TREE_TYPE (arg), dconst0));
    return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  }

  /* Reinterpret the float bits as an integer: whole-value lowpart when
     it fits in a word, otherwise pick out the word containing the sign
     bit.  */
  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
        return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
        word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
        word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      /* Build the double-word constant 1 << bitpos as a hi/lo pair.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = (HOST_WIDE_INT) 1 << bitpos;
        }
      else
        {
          hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
          lo = 0;
        }

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
        temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
                           immed_double_const (lo, hi, rmode),
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
         significant bit, then truncate the result to the desired mode
         and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp,
                           build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
5298
 
5299
/* Expand fork or exec calls.  TARGET is the desired target of the
5300
   call.  EXP is the call. FN is the
5301
   identificator of the actual function.  IGNORE is nonzero if the
5302
   value is to be ignored.  */
5303
 
5304
static rtx
5305
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5306
{
5307
  tree id, decl;
5308
  tree call;
5309
 
5310
  /* If we are not profiling, just call the function.  */
5311
  if (!profile_arc_flag)
5312
    return NULL_RTX;
5313
 
5314
  /* Otherwise call the wrapper.  This should be equivalent for the rest of
5315
     compiler, so the code does not diverge, and the wrapper may run the
5316
     code necessary for keeping the profiling sane.  */
5317
 
5318
  switch (DECL_FUNCTION_CODE (fn))
5319
    {
5320
    case BUILT_IN_FORK:
5321
      id = get_identifier ("__gcov_fork");
5322
      break;
5323
 
5324
    case BUILT_IN_EXECL:
5325
      id = get_identifier ("__gcov_execl");
5326
      break;
5327
 
5328
    case BUILT_IN_EXECV:
5329
      id = get_identifier ("__gcov_execv");
5330
      break;
5331
 
5332
    case BUILT_IN_EXECLP:
5333
      id = get_identifier ("__gcov_execlp");
5334
      break;
5335
 
5336
    case BUILT_IN_EXECLE:
5337
      id = get_identifier ("__gcov_execle");
5338
      break;
5339
 
5340
    case BUILT_IN_EXECVP:
5341
      id = get_identifier ("__gcov_execvp");
5342
      break;
5343
 
5344
    case BUILT_IN_EXECVE:
5345
      id = get_identifier ("__gcov_execve");
5346
      break;
5347
 
5348
    default:
5349
      gcc_unreachable ();
5350
    }
5351
 
5352
  decl = build_decl (DECL_SOURCE_LOCATION (fn),
5353
                     FUNCTION_DECL, id, TREE_TYPE (fn));
5354
  DECL_EXTERNAL (decl) = 1;
5355
  TREE_PUBLIC (decl) = 1;
5356
  DECL_ARTIFICIAL (decl) = 1;
5357
  TREE_NOTHROW (decl) = 1;
5358
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5359
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
5360
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5361
  return expand_call (call, target, ignore);
5362
 }
5363
 
5364
 
5365
 
5366
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
5367
   the pointer in these functions is void*, the tree optimizers may remove
5368
   casts.  The mode computed in expand_builtin isn't reliable either, due
5369
   to __sync_bool_compare_and_swap.
5370
 
5371
   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5372
   group of builtins.  This gives us log2 of the mode size.  */
5373
 
5374
static inline enum machine_mode
5375
get_builtin_sync_mode (int fcode_diff)
5376
{
5377
  /* The size is not negotiable, so ask not to get BLKmode in return
5378
     if the target indicates that a smaller size would be better.  */
5379
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5380
}
5381
 
5382
/* Expand the memory expression LOC and return the appropriate memory operand
5383
   for the builtin_sync operations.  */
5384
 
5385
static rtx
5386
get_builtin_sync_mem (tree loc, enum machine_mode mode)
5387
{
5388
  rtx addr, mem;
5389
 
5390
  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5391
  addr = convert_memory_address (Pmode, addr);
5392
 
5393
  /* Note that we explicitly do not want any alias information for this
5394
     memory, so that we kill all other live memories.  Otherwise we don't
5395
     satisfy the full barrier semantics of the intrinsic.  */
5396
  mem = validize_mem (gen_rtx_MEM (mode, addr));
5397
 
5398
  set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5399
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5400
  MEM_VOLATILE_P (mem) = 1;
5401
 
5402
  return mem;
5403
}
5404
 
5405
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  IGNORE is true if we don't actually care about
   the result of the operation at all.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
                               enum rtx_code code, bool after,
                               rtx target, bool ignore)
{
  rtx val, mem;
  enum machine_mode old_mode;
  location_t loc = EXPR_LOCATION (exp);

  /* NAND semantics changed in GCC 4.4; warn once per form about the
     change when -Wsync-nand is in effect.  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* Function-local statics: each warning fires at most once per
         compilation.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
        {
        case BUILT_IN_FETCH_AND_NAND_1:
        case BUILT_IN_FETCH_AND_NAND_2:
        case BUILT_IN_FETCH_AND_NAND_4:
        case BUILT_IN_FETCH_AND_NAND_8:
        case BUILT_IN_FETCH_AND_NAND_16:

          if (warned_f_a_n)
            break;

          fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
          inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
          warned_f_a_n = true;
          break;

        case BUILT_IN_NAND_AND_FETCH_1:
        case BUILT_IN_NAND_AND_FETCH_2:
        case BUILT_IN_NAND_AND_FETCH_4:
        case BUILT_IN_NAND_AND_FETCH_8:
        case BUILT_IN_NAND_AND_FETCH_16:

          if (warned_n_a_f)
            break;

          fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
          inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
          warned_n_a_f = true;
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  val = convert_modes (mode, old_mode, val, 1);

  /* When the result is unused, the cheaper non-fetching form can be
     emitted.  */
  if (ignore)
    return expand_sync_operation (mem, val, code);
  else
    return expand_sync_fetch_operation (mem, val, code, after, target);
}
5480
 
5481
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5482
   intrinsics. EXP is the CALL_EXPR.  IS_BOOL is
5483
   true if this is the boolean form.  TARGET is a place for us to store the
5484
   results; this is NOT optional if IS_BOOL is true.  */
5485
 
5486
static rtx
5487
expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5488
                                 bool is_bool, rtx target)
5489
{
5490
  rtx old_val, new_val, mem;
5491
  enum machine_mode old_mode;
5492
 
5493
  /* Expand the operands.  */
5494
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5495
 
5496
 
5497
  old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5498
                         mode, EXPAND_NORMAL);
5499
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
5500
     of CONST_INTs, where we know the old_mode only from the call argument.  */
5501
  old_mode = GET_MODE (old_val);
5502
  if (old_mode == VOIDmode)
5503
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5504
  old_val = convert_modes (mode, old_mode, old_val, 1);
5505
 
5506
  new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5507
                         mode, EXPAND_NORMAL);
5508
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
5509
     of CONST_INTs, where we know the old_mode only from the call argument.  */
5510
  old_mode = GET_MODE (new_val);
5511
  if (old_mode == VOIDmode)
5512
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5513
  new_val = convert_modes (mode, old_mode, new_val, 1);
5514
 
5515
  if (is_bool)
5516
    return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5517
  else
5518
    return expand_val_compare_and_swap (mem, old_val, new_val, target);
5519
}
5520
 
5521
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
5522
   general form is actually an atomic exchange, and some targets only
5523
   support a reduced form with the second argument being a constant 1.
5524
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
5525
   the results.  */
5526
 
5527
static rtx
5528
expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5529
                                  rtx target)
5530
{
5531
  rtx val, mem;
5532
  enum machine_mode old_mode;
5533
 
5534
  /* Expand the operands.  */
5535
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5536
  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5537
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
5538
     of CONST_INTs, where we know the old_mode only from the call argument.  */
5539
  old_mode = GET_MODE (val);
5540
  if (old_mode == VOIDmode)
5541
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5542
  val = convert_modes (mode, old_mode, val, 1);
5543
 
5544
  return expand_sync_lock_test_and_set (mem, val, target);
5545
}
5546
 
5547
/* Expand the __sync_synchronize intrinsic: emit a full memory barrier
   using the best mechanism the target offers — a memory_barrier insn,
   a synchronize libfunc, or as a last resort a volatile empty asm
   with a "memory" clobber.  */

static void
expand_builtin_synchronize (void)
{
  gimple x;
  VEC (tree, gc) *v_clobbers;

#ifdef HAVE_memory_barrier
  if (HAVE_memory_barrier)
    {
      emit_insn (gen_memory_barrier ());
      return;
    }
#endif

  if (synchronize_libfunc != NULL_RTX)
    {
      emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
      return;
    }

  /* If no explicit memory barrier instruction is available, create an
     empty asm stmt with a memory clobber.  */
  v_clobbers = VEC_alloc (tree, gc, 1);
  VEC_quick_push (tree, v_clobbers,
                  tree_cons (NULL, build_string (6, "memory"), NULL));
  x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
  gimple_asm_set_volatile (x, true);
  expand_asm_stmt (x);
}
5578
 
5579
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */
5580
 
5581
static void
5582
expand_builtin_lock_release (enum machine_mode mode, tree exp)
5583
{
5584
  enum insn_code icode;
5585
  rtx mem, insn;
5586
  rtx val = const0_rtx;
5587
 
5588
  /* Expand the operands.  */
5589
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5590
 
5591
  /* If there is an explicit operation in the md file, use it.  */
5592
  icode = sync_lock_release[mode];
5593
  if (icode != CODE_FOR_nothing)
5594
    {
5595
      if (!insn_data[icode].operand[1].predicate (val, mode))
5596
        val = force_reg (mode, val);
5597
 
5598
      insn = GEN_FCN (icode) (mem, val);
5599
      if (insn)
5600
        {
5601
          emit_insn (insn);
5602
          return;
5603
        }
5604
    }
5605
 
5606
  /* Otherwise we can implement this operation by emitting a barrier
5607
     followed by a store of zero.  */
5608
  expand_builtin_synchronize ();
5609
  emit_move_insn (mem, val);
5610
}
5611
 
5612
/* Expand an expression EXP that calls a built-in function,
5613
   with result going to TARGET if that's convenient
5614
   (and in mode MODE if that's convenient).
5615
   SUBTARGET may be used as the target for computing one of EXP's operands.
5616
   IGNORE is nonzero if the value is to be ignored.  */
5617
 
5618
rtx
5619
expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5620
                int ignore)
5621
{
5622
  tree fndecl = get_callee_fndecl (exp);
5623
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5624
  enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5625
 
5626
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5627
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5628
 
5629
  /* When not optimizing, generate calls to library functions for a certain
5630
     set of builtins.  */
5631
  if (!optimize
5632
      && !called_as_built_in (fndecl)
5633
      && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5634
      && fcode != BUILT_IN_ALLOCA
5635
      && fcode != BUILT_IN_FREE)
5636
    return expand_call (exp, target, ignore);
5637
 
5638
  /* The built-in function expanders test for target == const0_rtx
5639
     to determine whether the function's result will be ignored.  */
5640
  if (ignore)
5641
    target = const0_rtx;
5642
 
5643
  /* If the result of a pure or const built-in function is ignored, and
5644
     none of its arguments are volatile, we can avoid expanding the
5645
     built-in call and just evaluate the arguments for side-effects.  */
5646
  if (target == const0_rtx
5647
      && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5648
    {
5649
      bool volatilep = false;
5650
      tree arg;
5651
      call_expr_arg_iterator iter;
5652
 
5653
      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5654
        if (TREE_THIS_VOLATILE (arg))
5655
          {
5656
            volatilep = true;
5657
            break;
5658
          }
5659
 
5660
      if (! volatilep)
5661
        {
5662
          FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5663
            expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5664
          return const0_rtx;
5665
        }
5666
    }
5667
 
5668
  switch (fcode)
5669
    {
5670
    CASE_FLT_FN (BUILT_IN_FABS):
5671
      target = expand_builtin_fabs (exp, target, subtarget);
5672
      if (target)
5673
        return target;
5674
      break;
5675
 
5676
    CASE_FLT_FN (BUILT_IN_COPYSIGN):
5677
      target = expand_builtin_copysign (exp, target, subtarget);
5678
      if (target)
5679
        return target;
5680
      break;
5681
 
5682
      /* Just do a normal library call if we were unable to fold
5683
         the values.  */
5684
    CASE_FLT_FN (BUILT_IN_CABS):
5685
      break;
5686
 
5687
    CASE_FLT_FN (BUILT_IN_EXP):
5688
    CASE_FLT_FN (BUILT_IN_EXP10):
5689
    CASE_FLT_FN (BUILT_IN_POW10):
5690
    CASE_FLT_FN (BUILT_IN_EXP2):
5691
    CASE_FLT_FN (BUILT_IN_EXPM1):
5692
    CASE_FLT_FN (BUILT_IN_LOGB):
5693
    CASE_FLT_FN (BUILT_IN_LOG):
5694
    CASE_FLT_FN (BUILT_IN_LOG10):
5695
    CASE_FLT_FN (BUILT_IN_LOG2):
5696
    CASE_FLT_FN (BUILT_IN_LOG1P):
5697
    CASE_FLT_FN (BUILT_IN_TAN):
5698
    CASE_FLT_FN (BUILT_IN_ASIN):
5699
    CASE_FLT_FN (BUILT_IN_ACOS):
5700
    CASE_FLT_FN (BUILT_IN_ATAN):
5701
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5702
      /* Treat these like sqrt only if unsafe math optimizations are allowed,
5703
         because of possible accuracy problems.  */
5704
      if (! flag_unsafe_math_optimizations)
5705
        break;
5706
    CASE_FLT_FN (BUILT_IN_SQRT):
5707
    CASE_FLT_FN (BUILT_IN_FLOOR):
5708
    CASE_FLT_FN (BUILT_IN_CEIL):
5709
    CASE_FLT_FN (BUILT_IN_TRUNC):
5710
    CASE_FLT_FN (BUILT_IN_ROUND):
5711
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
5712
    CASE_FLT_FN (BUILT_IN_RINT):
5713
      target = expand_builtin_mathfn (exp, target, subtarget);
5714
      if (target)
5715
        return target;
5716
      break;
5717
 
5718
    CASE_FLT_FN (BUILT_IN_ILOGB):
5719
      if (! flag_unsafe_math_optimizations)
5720
        break;
5721
    CASE_FLT_FN (BUILT_IN_ISINF):
5722
    CASE_FLT_FN (BUILT_IN_FINITE):
5723
    case BUILT_IN_ISFINITE:
5724
    case BUILT_IN_ISNORMAL:
5725
      target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5726
      if (target)
5727
        return target;
5728
      break;
5729
 
5730
    CASE_FLT_FN (BUILT_IN_LCEIL):
5731
    CASE_FLT_FN (BUILT_IN_LLCEIL):
5732
    CASE_FLT_FN (BUILT_IN_LFLOOR):
5733
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
5734
      target = expand_builtin_int_roundingfn (exp, target);
5735
      if (target)
5736
        return target;
5737
      break;
5738
 
5739
    CASE_FLT_FN (BUILT_IN_LRINT):
5740
    CASE_FLT_FN (BUILT_IN_LLRINT):
5741
    CASE_FLT_FN (BUILT_IN_LROUND):
5742
    CASE_FLT_FN (BUILT_IN_LLROUND):
5743
      target = expand_builtin_int_roundingfn_2 (exp, target);
5744
      if (target)
5745
        return target;
5746
      break;
5747
 
5748
    CASE_FLT_FN (BUILT_IN_POW):
5749
      target = expand_builtin_pow (exp, target, subtarget);
5750
      if (target)
5751
        return target;
5752
      break;
5753
 
5754
    CASE_FLT_FN (BUILT_IN_POWI):
5755
      target = expand_builtin_powi (exp, target, subtarget);
5756
      if (target)
5757
        return target;
5758
      break;
5759
 
5760
    CASE_FLT_FN (BUILT_IN_ATAN2):
5761
    CASE_FLT_FN (BUILT_IN_LDEXP):
5762
    CASE_FLT_FN (BUILT_IN_SCALB):
5763
    CASE_FLT_FN (BUILT_IN_SCALBN):
5764
    CASE_FLT_FN (BUILT_IN_SCALBLN):
5765
      if (! flag_unsafe_math_optimizations)
5766
        break;
5767
 
5768
    CASE_FLT_FN (BUILT_IN_FMOD):
5769
    CASE_FLT_FN (BUILT_IN_REMAINDER):
5770
    CASE_FLT_FN (BUILT_IN_DREM):
5771
      target = expand_builtin_mathfn_2 (exp, target, subtarget);
5772
      if (target)
5773
        return target;
5774
      break;
5775
 
5776
    CASE_FLT_FN (BUILT_IN_CEXPI):
5777
      target = expand_builtin_cexpi (exp, target, subtarget);
5778
      gcc_assert (target);
5779
      return target;
5780
 
5781
    CASE_FLT_FN (BUILT_IN_SIN):
5782
    CASE_FLT_FN (BUILT_IN_COS):
5783
      if (! flag_unsafe_math_optimizations)
5784
        break;
5785
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
5786
      if (target)
5787
        return target;
5788
      break;
5789
 
5790
    CASE_FLT_FN (BUILT_IN_SINCOS):
5791
      if (! flag_unsafe_math_optimizations)
5792
        break;
5793
      target = expand_builtin_sincos (exp);
5794
      if (target)
5795
        return target;
5796
      break;
5797
 
5798
    case BUILT_IN_APPLY_ARGS:
5799
      return expand_builtin_apply_args ();
5800
 
5801
      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5802
         FUNCTION with a copy of the parameters described by
5803
         ARGUMENTS, and ARGSIZE.  It returns a block of memory
5804
         allocated on the stack into which is stored all the registers
5805
         that might possibly be used for returning the result of a
5806
         function.  ARGUMENTS is the value returned by
5807
         __builtin_apply_args.  ARGSIZE is the number of bytes of
5808
         arguments that must be copied.  ??? How should this value be
5809
         computed?  We'll also need a safe worst case value for varargs
5810
         functions.  */
5811
    case BUILT_IN_APPLY:
5812
      if (!validate_arglist (exp, POINTER_TYPE,
5813
                             POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5814
          && !validate_arglist (exp, REFERENCE_TYPE,
5815
                                POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5816
        return const0_rtx;
5817
      else
5818
        {
5819
          rtx ops[3];
5820
 
5821
          ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5822
          ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5823
          ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5824
 
5825
          return expand_builtin_apply (ops[0], ops[1], ops[2]);
5826
        }
5827
 
5828
      /* __builtin_return (RESULT) causes the function to return the
5829
         value described by RESULT.  RESULT is address of the block of
5830
         memory returned by __builtin_apply.  */
5831
    case BUILT_IN_RETURN:
5832
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5833
        expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5834
      return const0_rtx;
5835
 
5836
    case BUILT_IN_SAVEREGS:
5837
      return expand_builtin_saveregs ();
5838
 
5839
    case BUILT_IN_ARGS_INFO:
5840
      return expand_builtin_args_info (exp);
5841
 
5842
    case BUILT_IN_VA_ARG_PACK:
5843
      /* All valid uses of __builtin_va_arg_pack () are removed during
5844
         inlining.  */
5845
      error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5846
      return const0_rtx;
5847
 
5848
    case BUILT_IN_VA_ARG_PACK_LEN:
5849
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
5850
         inlining.  */
5851
      error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5852
      return const0_rtx;
5853
 
5854
      /* Return the address of the first anonymous stack arg.  */
5855
    case BUILT_IN_NEXT_ARG:
5856
      if (fold_builtin_next_arg (exp, false))
5857
        return const0_rtx;
5858
      return expand_builtin_next_arg ();
5859
 
5860
    case BUILT_IN_CLEAR_CACHE:
5861
      target = expand_builtin___clear_cache (exp);
5862
      if (target)
5863
        return target;
5864
      break;
5865
 
5866
    case BUILT_IN_CLASSIFY_TYPE:
5867
      return expand_builtin_classify_type (exp);
5868
 
5869
    case BUILT_IN_CONSTANT_P:
5870
      return const0_rtx;
5871
 
5872
    case BUILT_IN_FRAME_ADDRESS:
5873
    case BUILT_IN_RETURN_ADDRESS:
5874
      return expand_builtin_frame_address (fndecl, exp);
5875
 
5876
    /* Returns the address of the area where the structure is returned.
       0 otherwise.  */
5878
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5879
      if (call_expr_nargs (exp) != 0
5880
          || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5881
          || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5882
        return const0_rtx;
5883
      else
5884
        return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5885
 
5886
    case BUILT_IN_ALLOCA:
5887
      target = expand_builtin_alloca (exp, target);
5888
      if (target)
5889
        return target;
5890
      break;
5891
 
5892
    case BUILT_IN_STACK_SAVE:
5893
      return expand_stack_save ();
5894
 
5895
    case BUILT_IN_STACK_RESTORE:
5896
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5897
      return const0_rtx;
5898
 
5899
    case BUILT_IN_BSWAP32:
5900
    case BUILT_IN_BSWAP64:
5901
      target = expand_builtin_bswap (exp, target, subtarget);
5902
 
5903
      if (target)
5904
        return target;
5905
      break;
5906
 
5907
    CASE_INT_FN (BUILT_IN_FFS):
5908
    case BUILT_IN_FFSIMAX:
5909
      target = expand_builtin_unop (target_mode, exp, target,
5910
                                    subtarget, ffs_optab);
5911
      if (target)
5912
        return target;
5913
      break;
5914
 
5915
    CASE_INT_FN (BUILT_IN_CLZ):
5916
    case BUILT_IN_CLZIMAX:
5917
      target = expand_builtin_unop (target_mode, exp, target,
5918
                                    subtarget, clz_optab);
5919
      if (target)
5920
        return target;
5921
      break;
5922
 
5923
    CASE_INT_FN (BUILT_IN_CTZ):
5924
    case BUILT_IN_CTZIMAX:
5925
      target = expand_builtin_unop (target_mode, exp, target,
5926
                                    subtarget, ctz_optab);
5927
      if (target)
5928
        return target;
5929
      break;
5930
 
5931
    CASE_INT_FN (BUILT_IN_POPCOUNT):
5932
    case BUILT_IN_POPCOUNTIMAX:
5933
      target = expand_builtin_unop (target_mode, exp, target,
5934
                                    subtarget, popcount_optab);
5935
      if (target)
5936
        return target;
5937
      break;
5938
 
5939
    CASE_INT_FN (BUILT_IN_PARITY):
5940
    case BUILT_IN_PARITYIMAX:
5941
      target = expand_builtin_unop (target_mode, exp, target,
5942
                                    subtarget, parity_optab);
5943
      if (target)
5944
        return target;
5945
      break;
5946
 
5947
    case BUILT_IN_STRLEN:
5948
      target = expand_builtin_strlen (exp, target, target_mode);
5949
      if (target)
5950
        return target;
5951
      break;
5952
 
5953
    case BUILT_IN_STRCPY:
5954
      target = expand_builtin_strcpy (exp, target);
5955
      if (target)
5956
        return target;
5957
      break;
5958
 
5959
    case BUILT_IN_STRNCPY:
5960
      target = expand_builtin_strncpy (exp, target);
5961
      if (target)
5962
        return target;
5963
      break;
5964
 
5965
    case BUILT_IN_STPCPY:
5966
      target = expand_builtin_stpcpy (exp, target, mode);
5967
      if (target)
5968
        return target;
5969
      break;
5970
 
5971
    case BUILT_IN_MEMCPY:
5972
      target = expand_builtin_memcpy (exp, target);
5973
      if (target)
5974
        return target;
5975
      break;
5976
 
5977
    case BUILT_IN_MEMPCPY:
5978
      target = expand_builtin_mempcpy (exp, target, mode);
5979
      if (target)
5980
        return target;
5981
      break;
5982
 
5983
    case BUILT_IN_MEMSET:
5984
      target = expand_builtin_memset (exp, target, mode);
5985
      if (target)
5986
        return target;
5987
      break;
5988
 
5989
    case BUILT_IN_BZERO:
5990
      target = expand_builtin_bzero (exp);
5991
      if (target)
5992
        return target;
5993
      break;
5994
 
5995
    case BUILT_IN_STRCMP:
5996
      target = expand_builtin_strcmp (exp, target);
5997
      if (target)
5998
        return target;
5999
      break;
6000
 
6001
    case BUILT_IN_STRNCMP:
6002
      target = expand_builtin_strncmp (exp, target, mode);
6003
      if (target)
6004
        return target;
6005
      break;
6006
 
6007
    case BUILT_IN_BCMP:
6008
    case BUILT_IN_MEMCMP:
6009
      target = expand_builtin_memcmp (exp, target, mode);
6010
      if (target)
6011
        return target;
6012
      break;
6013
 
6014
    case BUILT_IN_SETJMP:
6015
      /* This should have been lowered to the builtins below.  */
6016
      gcc_unreachable ();
6017
 
6018
    case BUILT_IN_SETJMP_SETUP:
6019
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
6020
          and the receiver label.  */
6021
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6022
        {
6023
          rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6024
                                      VOIDmode, EXPAND_NORMAL);
6025
          tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6026
          rtx label_r = label_rtx (label);
6027
 
6028
          /* This is copied from the handling of non-local gotos.  */
6029
          expand_builtin_setjmp_setup (buf_addr, label_r);
6030
          nonlocal_goto_handler_labels
6031
            = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6032
                                 nonlocal_goto_handler_labels);
6033
          /* ??? Do not let expand_label treat us as such since we would
6034
             not want to be both on the list of non-local labels and on
6035
             the list of forced labels.  */
6036
          FORCED_LABEL (label) = 0;
6037
          return const0_rtx;
6038
        }
6039
      break;
6040
 
6041
    case BUILT_IN_SETJMP_DISPATCHER:
6042
       /* __builtin_setjmp_dispatcher is passed the dispatcher label.  */
6043
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6044
        {
6045
          tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6046
          rtx label_r = label_rtx (label);
6047
 
6048
          /* Remove the dispatcher label from the list of non-local labels
6049
             since the receiver labels have been added to it above.  */
6050
          remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6051
          return const0_rtx;
6052
        }
6053
      break;
6054
 
6055
    case BUILT_IN_SETJMP_RECEIVER:
6056
       /* __builtin_setjmp_receiver is passed the receiver label.  */
6057
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6058
        {
6059
          tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6060
          rtx label_r = label_rtx (label);
6061
 
6062
          expand_builtin_setjmp_receiver (label_r);
6063
          return const0_rtx;
6064
        }
6065
      break;
6066
 
6067
      /* __builtin_longjmp is passed a pointer to an array of five words.
6068
         It's similar to the C library longjmp function but works with
6069
         __builtin_setjmp above.  */
6070
    case BUILT_IN_LONGJMP:
6071
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6072
        {
6073
          rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6074
                                      VOIDmode, EXPAND_NORMAL);
6075
          rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6076
 
6077
          if (value != const1_rtx)
6078
            {
6079
              error ("%<__builtin_longjmp%> second argument must be 1");
6080
              return const0_rtx;
6081
            }
6082
 
6083
          expand_builtin_longjmp (buf_addr, value);
6084
          return const0_rtx;
6085
        }
6086
      break;
6087
 
6088
    case BUILT_IN_NONLOCAL_GOTO:
6089
      target = expand_builtin_nonlocal_goto (exp);
6090
      if (target)
6091
        return target;
6092
      break;
6093
 
6094
      /* This updates the setjmp buffer that is its argument with the value
6095
         of the current stack pointer.  */
6096
    case BUILT_IN_UPDATE_SETJMP_BUF:
6097
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6098
        {
6099
          rtx buf_addr
6100
            = expand_normal (CALL_EXPR_ARG (exp, 0));
6101
 
6102
          expand_builtin_update_setjmp_buf (buf_addr);
6103
          return const0_rtx;
6104
        }
6105
      break;
6106
 
6107
    case BUILT_IN_TRAP:
6108
      expand_builtin_trap ();
6109
      return const0_rtx;
6110
 
6111
    case BUILT_IN_UNREACHABLE:
6112
      expand_builtin_unreachable ();
6113
      return const0_rtx;
6114
 
6115
    CASE_FLT_FN (BUILT_IN_SIGNBIT):
6116
    case BUILT_IN_SIGNBITD32:
6117
    case BUILT_IN_SIGNBITD64:
6118
    case BUILT_IN_SIGNBITD128:
6119
      target = expand_builtin_signbit (exp, target);
6120
      if (target)
6121
        return target;
6122
      break;
6123
 
6124
      /* Various hooks for the DWARF 2 __throw routine.  */
6125
    case BUILT_IN_UNWIND_INIT:
6126
      expand_builtin_unwind_init ();
6127
      return const0_rtx;
6128
    case BUILT_IN_DWARF_CFA:
6129
      return virtual_cfa_rtx;
6130
#ifdef DWARF2_UNWIND_INFO
6131
    case BUILT_IN_DWARF_SP_COLUMN:
6132
      return expand_builtin_dwarf_sp_column ();
6133
    case BUILT_IN_INIT_DWARF_REG_SIZES:
6134
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6135
      return const0_rtx;
6136
#endif
6137
    case BUILT_IN_FROB_RETURN_ADDR:
6138
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6139
    case BUILT_IN_EXTRACT_RETURN_ADDR:
6140
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6141
    case BUILT_IN_EH_RETURN:
6142
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6143
                                CALL_EXPR_ARG (exp, 1));
6144
      return const0_rtx;
6145
#ifdef EH_RETURN_DATA_REGNO
6146
    case BUILT_IN_EH_RETURN_DATA_REGNO:
6147
      return expand_builtin_eh_return_data_regno (exp);
6148
#endif
6149
    case BUILT_IN_EXTEND_POINTER:
6150
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6151
    case BUILT_IN_EH_POINTER:
6152
      return expand_builtin_eh_pointer (exp);
6153
    case BUILT_IN_EH_FILTER:
6154
      return expand_builtin_eh_filter (exp);
6155
    case BUILT_IN_EH_COPY_VALUES:
6156
      return expand_builtin_eh_copy_values (exp);
6157
 
6158
    case BUILT_IN_VA_START:
6159
      return expand_builtin_va_start (exp);
6160
    case BUILT_IN_VA_END:
6161
      return expand_builtin_va_end (exp);
6162
    case BUILT_IN_VA_COPY:
6163
      return expand_builtin_va_copy (exp);
6164
    case BUILT_IN_EXPECT:
6165
      return expand_builtin_expect (exp, target);
6166
    case BUILT_IN_PREFETCH:
6167
      expand_builtin_prefetch (exp);
6168
      return const0_rtx;
6169
 
6170
    case BUILT_IN_PROFILE_FUNC_ENTER:
6171
      return expand_builtin_profile_func (false);
6172
    case BUILT_IN_PROFILE_FUNC_EXIT:
6173
      return expand_builtin_profile_func (true);
6174
 
6175
    case BUILT_IN_INIT_TRAMPOLINE:
6176
      return expand_builtin_init_trampoline (exp);
6177
    case BUILT_IN_ADJUST_TRAMPOLINE:
6178
      return expand_builtin_adjust_trampoline (exp);
6179
 
6180
    case BUILT_IN_FORK:
6181
    case BUILT_IN_EXECL:
6182
    case BUILT_IN_EXECV:
6183
    case BUILT_IN_EXECLP:
6184
    case BUILT_IN_EXECLE:
6185
    case BUILT_IN_EXECVP:
6186
    case BUILT_IN_EXECVE:
6187
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6188
      if (target)
6189
        return target;
6190
      break;
6191
 
6192
    case BUILT_IN_FETCH_AND_ADD_1:
6193
    case BUILT_IN_FETCH_AND_ADD_2:
6194
    case BUILT_IN_FETCH_AND_ADD_4:
6195
    case BUILT_IN_FETCH_AND_ADD_8:
6196
    case BUILT_IN_FETCH_AND_ADD_16:
6197
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6198
      target = expand_builtin_sync_operation (mode, exp, PLUS,
6199
                                              false, target, ignore);
6200
      if (target)
6201
        return target;
6202
      break;
6203
 
6204
    case BUILT_IN_FETCH_AND_SUB_1:
6205
    case BUILT_IN_FETCH_AND_SUB_2:
6206
    case BUILT_IN_FETCH_AND_SUB_4:
6207
    case BUILT_IN_FETCH_AND_SUB_8:
6208
    case BUILT_IN_FETCH_AND_SUB_16:
6209
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6210
      target = expand_builtin_sync_operation (mode, exp, MINUS,
6211
                                              false, target, ignore);
6212
      if (target)
6213
        return target;
6214
      break;
6215
 
6216
    case BUILT_IN_FETCH_AND_OR_1:
6217
    case BUILT_IN_FETCH_AND_OR_2:
6218
    case BUILT_IN_FETCH_AND_OR_4:
6219
    case BUILT_IN_FETCH_AND_OR_8:
6220
    case BUILT_IN_FETCH_AND_OR_16:
6221
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6222
      target = expand_builtin_sync_operation (mode, exp, IOR,
6223
                                              false, target, ignore);
6224
      if (target)
6225
        return target;
6226
      break;
6227
 
6228
    case BUILT_IN_FETCH_AND_AND_1:
6229
    case BUILT_IN_FETCH_AND_AND_2:
6230
    case BUILT_IN_FETCH_AND_AND_4:
6231
    case BUILT_IN_FETCH_AND_AND_8:
6232
    case BUILT_IN_FETCH_AND_AND_16:
6233
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6234
      target = expand_builtin_sync_operation (mode, exp, AND,
6235
                                              false, target, ignore);
6236
      if (target)
6237
        return target;
6238
      break;
6239
 
6240
    case BUILT_IN_FETCH_AND_XOR_1:
6241
    case BUILT_IN_FETCH_AND_XOR_2:
6242
    case BUILT_IN_FETCH_AND_XOR_4:
6243
    case BUILT_IN_FETCH_AND_XOR_8:
6244
    case BUILT_IN_FETCH_AND_XOR_16:
6245
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6246
      target = expand_builtin_sync_operation (mode, exp, XOR,
6247
                                              false, target, ignore);
6248
      if (target)
6249
        return target;
6250
      break;
6251
 
6252
    case BUILT_IN_FETCH_AND_NAND_1:
6253
    case BUILT_IN_FETCH_AND_NAND_2:
6254
    case BUILT_IN_FETCH_AND_NAND_4:
6255
    case BUILT_IN_FETCH_AND_NAND_8:
6256
    case BUILT_IN_FETCH_AND_NAND_16:
6257
      mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6258
      target = expand_builtin_sync_operation (mode, exp, NOT,
6259
                                              false, target, ignore);
6260
      if (target)
6261
        return target;
6262
      break;
6263
 
6264
    case BUILT_IN_ADD_AND_FETCH_1:
6265
    case BUILT_IN_ADD_AND_FETCH_2:
6266
    case BUILT_IN_ADD_AND_FETCH_4:
6267
    case BUILT_IN_ADD_AND_FETCH_8:
6268
    case BUILT_IN_ADD_AND_FETCH_16:
6269
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6270
      target = expand_builtin_sync_operation (mode, exp, PLUS,
6271
                                              true, target, ignore);
6272
      if (target)
6273
        return target;
6274
      break;
6275
 
6276
    case BUILT_IN_SUB_AND_FETCH_1:
6277
    case BUILT_IN_SUB_AND_FETCH_2:
6278
    case BUILT_IN_SUB_AND_FETCH_4:
6279
    case BUILT_IN_SUB_AND_FETCH_8:
6280
    case BUILT_IN_SUB_AND_FETCH_16:
6281
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6282
      target = expand_builtin_sync_operation (mode, exp, MINUS,
6283
                                              true, target, ignore);
6284
      if (target)
6285
        return target;
6286
      break;
6287
 
6288
    case BUILT_IN_OR_AND_FETCH_1:
6289
    case BUILT_IN_OR_AND_FETCH_2:
6290
    case BUILT_IN_OR_AND_FETCH_4:
6291
    case BUILT_IN_OR_AND_FETCH_8:
6292
    case BUILT_IN_OR_AND_FETCH_16:
6293
      mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6294
      target = expand_builtin_sync_operation (mode, exp, IOR,
6295
                                              true, target, ignore);
6296
      if (target)
6297
        return target;
6298
      break;
6299
 
6300
    case BUILT_IN_AND_AND_FETCH_1:
6301
    case BUILT_IN_AND_AND_FETCH_2:
6302
    case BUILT_IN_AND_AND_FETCH_4:
6303
    case BUILT_IN_AND_AND_FETCH_8:
6304
    case BUILT_IN_AND_AND_FETCH_16:
6305
      mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6306
      target = expand_builtin_sync_operation (mode, exp, AND,
6307
                                              true, target, ignore);
6308
      if (target)
6309
        return target;
6310
      break;
6311
 
6312
    case BUILT_IN_XOR_AND_FETCH_1:
6313
    case BUILT_IN_XOR_AND_FETCH_2:
6314
    case BUILT_IN_XOR_AND_FETCH_4:
6315
    case BUILT_IN_XOR_AND_FETCH_8:
6316
    case BUILT_IN_XOR_AND_FETCH_16:
6317
      mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6318
      target = expand_builtin_sync_operation (mode, exp, XOR,
6319
                                              true, target, ignore);
6320
      if (target)
6321
        return target;
6322
      break;
6323
 
6324
    case BUILT_IN_NAND_AND_FETCH_1:
6325
    case BUILT_IN_NAND_AND_FETCH_2:
6326
    case BUILT_IN_NAND_AND_FETCH_4:
6327
    case BUILT_IN_NAND_AND_FETCH_8:
6328
    case BUILT_IN_NAND_AND_FETCH_16:
6329
      mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6330
      target = expand_builtin_sync_operation (mode, exp, NOT,
6331
                                              true, target, ignore);
6332
      if (target)
6333
        return target;
6334
      break;
6335
 
6336
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6337
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6338
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6339
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6340
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6341
      if (mode == VOIDmode)
6342
        mode = TYPE_MODE (boolean_type_node);
6343
      if (!target || !register_operand (target, mode))
6344
        target = gen_reg_rtx (mode);
6345
 
6346
      mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6347
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
6348
      if (target)
6349
        return target;
6350
      break;
6351
 
6352
    case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6353
    case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6354
    case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6355
    case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6356
    case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6357
      mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6358
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
6359
      if (target)
6360
        return target;
6361
      break;
6362
 
6363
    case BUILT_IN_LOCK_TEST_AND_SET_1:
6364
    case BUILT_IN_LOCK_TEST_AND_SET_2:
6365
    case BUILT_IN_LOCK_TEST_AND_SET_4:
6366
    case BUILT_IN_LOCK_TEST_AND_SET_8:
6367
    case BUILT_IN_LOCK_TEST_AND_SET_16:
6368
      mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6369
      target = expand_builtin_lock_test_and_set (mode, exp, target);
6370
      if (target)
6371
        return target;
6372
      break;
6373
 
6374
    case BUILT_IN_LOCK_RELEASE_1:
6375
    case BUILT_IN_LOCK_RELEASE_2:
6376
    case BUILT_IN_LOCK_RELEASE_4:
6377
    case BUILT_IN_LOCK_RELEASE_8:
6378
    case BUILT_IN_LOCK_RELEASE_16:
6379
      mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6380
      expand_builtin_lock_release (mode, exp);
6381
      return const0_rtx;
6382
 
6383
    case BUILT_IN_SYNCHRONIZE:
6384
      expand_builtin_synchronize ();
6385
      return const0_rtx;
6386
 
6387
    case BUILT_IN_OBJECT_SIZE:
6388
      return expand_builtin_object_size (exp);
6389
 
6390
    case BUILT_IN_MEMCPY_CHK:
6391
    case BUILT_IN_MEMPCPY_CHK:
6392
    case BUILT_IN_MEMMOVE_CHK:
6393
    case BUILT_IN_MEMSET_CHK:
6394
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
6395
      if (target)
6396
        return target;
6397
      break;
6398
 
6399
    case BUILT_IN_STRCPY_CHK:
6400
    case BUILT_IN_STPCPY_CHK:
6401
    case BUILT_IN_STRNCPY_CHK:
6402
    case BUILT_IN_STRCAT_CHK:
6403
    case BUILT_IN_STRNCAT_CHK:
6404
    case BUILT_IN_SNPRINTF_CHK:
6405
    case BUILT_IN_VSNPRINTF_CHK:
6406
      maybe_emit_chk_warning (exp, fcode);
6407
      break;
6408
 
6409
    case BUILT_IN_SPRINTF_CHK:
6410
    case BUILT_IN_VSPRINTF_CHK:
6411
      maybe_emit_sprintf_chk_warning (exp, fcode);
6412
      break;
6413
 
6414
    case BUILT_IN_FREE:
6415
      maybe_emit_free_warning (exp);
6416
      break;
6417
 
6418
    default:    /* just do library call, if unknown builtin */
6419
      break;
6420
    }
6421
 
6422
  /* The switch statement above can drop through to cause the function
6423
     to be called normally.  */
6424
  return expand_call (exp, target, ignore);
6425
}
6426
 
6427
/* Determine whether a tree node represents a call to a built-in
6428
   function.  If the tree T is a call to a built-in function with
6429
   the right number of arguments of the appropriate types, return
6430
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6431
   Otherwise the return value is END_BUILTINS.  */
6432
 
6433
enum built_in_function
6434
builtin_mathfn_code (const_tree t)
6435
{
6436
  const_tree fndecl, arg, parmlist;
6437
  const_tree argtype, parmtype;
6438
  const_call_expr_arg_iterator iter;
6439
 
6440
  if (TREE_CODE (t) != CALL_EXPR
6441
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6442
    return END_BUILTINS;
6443
 
6444
  fndecl = get_callee_fndecl (t);
6445
  if (fndecl == NULL_TREE
6446
      || TREE_CODE (fndecl) != FUNCTION_DECL
6447
      || ! DECL_BUILT_IN (fndecl)
6448
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6449
    return END_BUILTINS;
6450
 
6451
  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6452
  init_const_call_expr_arg_iterator (t, &iter);
6453
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6454
    {
6455
      /* If a function doesn't take a variable number of arguments,
6456
         the last element in the list will have type `void'.  */
6457
      parmtype = TREE_VALUE (parmlist);
6458
      if (VOID_TYPE_P (parmtype))
6459
        {
6460
          if (more_const_call_expr_args_p (&iter))
6461
            return END_BUILTINS;
6462
          return DECL_FUNCTION_CODE (fndecl);
6463
        }
6464
 
6465
      if (! more_const_call_expr_args_p (&iter))
6466
        return END_BUILTINS;
6467
 
6468
      arg = next_const_call_expr_arg (&iter);
6469
      argtype = TREE_TYPE (arg);
6470
 
6471
      if (SCALAR_FLOAT_TYPE_P (parmtype))
6472
        {
6473
          if (! SCALAR_FLOAT_TYPE_P (argtype))
6474
            return END_BUILTINS;
6475
        }
6476
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6477
        {
6478
          if (! COMPLEX_FLOAT_TYPE_P (argtype))
6479
            return END_BUILTINS;
6480
        }
6481
      else if (POINTER_TYPE_P (parmtype))
6482
        {
6483
          if (! POINTER_TYPE_P (argtype))
6484
            return END_BUILTINS;
6485
        }
6486
      else if (INTEGRAL_TYPE_P (parmtype))
6487
        {
6488
          if (! INTEGRAL_TYPE_P (argtype))
6489
            return END_BUILTINS;
6490
        }
6491
      else
6492
        return END_BUILTINS;
6493
    }
6494
 
6495
  /* Variable-length argument list.  */
6496
  return DECL_FUNCTION_CODE (fndecl);
6497
}
6498
 
6499
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
6500
   evaluate to a constant.  */
6501
 
6502
static tree
6503
fold_builtin_constant_p (tree arg)
6504
{
6505
  /* We return 1 for a numeric type that's known to be a constant
6506
     value at compile-time or for an aggregate type that's a
6507
     literal constant.  */
6508
  STRIP_NOPS (arg);
6509
 
6510
  /* If we know this is a constant, emit the constant of one.  */
6511
  if (CONSTANT_CLASS_P (arg)
6512
      || (TREE_CODE (arg) == CONSTRUCTOR
6513
          && TREE_CONSTANT (arg)))
6514
    return integer_one_node;
6515
  if (TREE_CODE (arg) == ADDR_EXPR)
6516
    {
6517
       tree op = TREE_OPERAND (arg, 0);
6518
       if (TREE_CODE (op) == STRING_CST
6519
           || (TREE_CODE (op) == ARRAY_REF
6520
               && integer_zerop (TREE_OPERAND (op, 1))
6521
               && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6522
         return integer_one_node;
6523
    }
6524
 
6525
  /* If this expression has side effects, show we don't know it to be a
6526
     constant.  Likewise if it's a pointer or aggregate type since in
6527
     those case we only want literals, since those are only optimized
6528
     when generating RTL, not later.
6529
     And finally, if we are compiling an initializer, not code, we
6530
     need to return a definite result now; there's not going to be any
6531
     more optimization done.  */
6532
  if (TREE_SIDE_EFFECTS (arg)
6533
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6534
      || POINTER_TYPE_P (TREE_TYPE (arg))
6535
      || cfun == 0
6536
      || folding_initializer)
6537
    return integer_zero_node;
6538
 
6539
  return NULL_TREE;
6540
}
6541
 
6542
/* Create builtin_expect with PRED and EXPECTED as its arguments and
6543
   return it as a truthvalue.  */
6544
 
6545
static tree
6546
build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6547
{
6548
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6549
 
6550
  fn = built_in_decls[BUILT_IN_EXPECT];
6551
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6552
  ret_type = TREE_TYPE (TREE_TYPE (fn));
6553
  pred_type = TREE_VALUE (arg_types);
6554
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6555
 
6556
  pred = fold_convert_loc (loc, pred_type, pred);
6557
  expected = fold_convert_loc (loc, expected_type, expected);
6558
  call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6559
 
6560
  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6561
                 build_int_cst (ret_type, 0));
6562
}
6563
 
6564
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
6565
   NULL_TREE if no simplification is possible.  */
6566
 
6567
static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
  tree inner, fndecl;
  enum tree_code code;

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = arg0;
  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  /* Nested __builtin_expect: fold the outer call to its first
     argument, which still contains the inner call.  */
  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner = arg0;
  while (TREE_CODE (inner) == NOP_EXPR
         && INTEGRAL_TYPE_P (TREE_TYPE (inner))
         && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
    inner = TREE_OPERAND (inner, 0);

  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      /* Rewrite expect (a && b, v) as expect (a, v) && expect (b, v),
	 likewise for ||, so each operand carries the hint.  */
      op0 = build_builtin_expect_predicate (loc, op0, arg1);
      op1 = build_builtin_expect_predicate (loc, op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Peel component and array references to reach the underlying
	 declaration whose address is being taken.  */
      do
        {
          inner = TREE_OPERAND (inner, 0);
        }
      while (TREE_CODE (inner) == COMPONENT_REF
             || TREE_CODE (inner) == ARRAY_REF);
      /* A weak symbol's address may resolve to zero at link time, so
	 comparisons against it do not fold to a constant.  */
      if ((TREE_CODE (inner) == VAR_DECL
           || TREE_CODE (inner) == FUNCTION_DECL)
          && DECL_WEAK (inner))
        return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
6634
 
6635
/* Fold a call to __builtin_classify_type with argument ARG.  */
6636
 
6637
static tree
6638
fold_builtin_classify_type (tree arg)
6639
{
6640
  if (arg == 0)
6641
    return build_int_cst (NULL_TREE, no_type_class);
6642
 
6643
  return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6644
}
6645
 
6646
/* Fold a call to __builtin_strlen with argument ARG.  */
6647
 
6648
static tree
6649
fold_builtin_strlen (location_t loc, tree type, tree arg)
6650
{
6651
  if (!validate_arg (arg, POINTER_TYPE))
6652
    return NULL_TREE;
6653
  else
6654
    {
6655
      tree len = c_strlen (arg, 0);
6656
 
6657
      if (len)
6658
        return fold_convert_loc (loc, type, len);
6659
 
6660
      return NULL_TREE;
6661
    }
6662
}
6663
 
6664
/* Fold a call to __builtin_inf or __builtin_huge_val.  */
6665
 
6666
static tree
6667
fold_builtin_inf (location_t loc, tree type, int warn)
6668
{
6669
  REAL_VALUE_TYPE real;
6670
 
6671
  /* __builtin_inff is intended to be usable to define INFINITY on all
6672
     targets.  If an infinity is not available, INFINITY expands "to a
6673
     positive constant of type float that overflows at translation
6674
     time", footnote "In this case, using INFINITY will violate the
6675
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6676
     Thus we pedwarn to ensure this constraint violation is
6677
     diagnosed.  */
6678
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6679
    pedwarn (loc, 0, "target format does not support infinity");
6680
 
6681
  real_inf (&real);
6682
  return build_real (type, real);
6683
}
6684
 
6685
/* Fold a call to __builtin_nan or __builtin_nans with argument ARG.  */
6686
 
6687
static tree
6688
fold_builtin_nan (tree arg, tree type, int quiet)
6689
{
6690
  REAL_VALUE_TYPE real;
6691
  const char *str;
6692
 
6693
  if (!validate_arg (arg, POINTER_TYPE))
6694
    return NULL_TREE;
6695
  str = c_getstr (arg);
6696
  if (!str)
6697
    return NULL_TREE;
6698
 
6699
  if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6700
    return NULL_TREE;
6701
 
6702
  return build_real (type, real);
6703
}
6704
 
6705
/* Return true if the floating point expression T has an integer value.
6706
   We also allow +Inf, -Inf and NaN to be considered integer values.  */
6707
 
6708
static bool
6709
integer_valued_real_p (tree t)
6710
{
6711
  switch (TREE_CODE (t))
6712
    {
6713
    case FLOAT_EXPR:
6714
      return true;
6715
 
6716
    case ABS_EXPR:
6717
    case SAVE_EXPR:
6718
      return integer_valued_real_p (TREE_OPERAND (t, 0));
6719
 
6720
    case COMPOUND_EXPR:
6721
    case MODIFY_EXPR:
6722
    case BIND_EXPR:
6723
      return integer_valued_real_p (TREE_OPERAND (t, 1));
6724
 
6725
    case PLUS_EXPR:
6726
    case MINUS_EXPR:
6727
    case MULT_EXPR:
6728
    case MIN_EXPR:
6729
    case MAX_EXPR:
6730
      return integer_valued_real_p (TREE_OPERAND (t, 0))
6731
             && integer_valued_real_p (TREE_OPERAND (t, 1));
6732
 
6733
    case COND_EXPR:
6734
      return integer_valued_real_p (TREE_OPERAND (t, 1))
6735
             && integer_valued_real_p (TREE_OPERAND (t, 2));
6736
 
6737
    case REAL_CST:
6738
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6739
 
6740
    case NOP_EXPR:
6741
      {
6742
        tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6743
        if (TREE_CODE (type) == INTEGER_TYPE)
6744
          return true;
6745
        if (TREE_CODE (type) == REAL_TYPE)
6746
          return integer_valued_real_p (TREE_OPERAND (t, 0));
6747
        break;
6748
      }
6749
 
6750
    case CALL_EXPR:
6751
      switch (builtin_mathfn_code (t))
6752
        {
6753
        CASE_FLT_FN (BUILT_IN_CEIL):
6754
        CASE_FLT_FN (BUILT_IN_FLOOR):
6755
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
6756
        CASE_FLT_FN (BUILT_IN_RINT):
6757
        CASE_FLT_FN (BUILT_IN_ROUND):
6758
        CASE_FLT_FN (BUILT_IN_TRUNC):
6759
          return true;
6760
 
6761
        CASE_FLT_FN (BUILT_IN_FMIN):
6762
        CASE_FLT_FN (BUILT_IN_FMAX):
6763
          return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6764
            && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6765
 
6766
        default:
6767
          break;
6768
        }
6769
      break;
6770
 
6771
    default:
6772
      break;
6773
    }
6774
  return false;
6775
}
6776
 
6777
/* FNDECL is assumed to be a builtin where truncation can be propagated
6778
   across (for instance floor((double)f) == (double)floorf (f).
6779
   Do the transformation for a call with argument ARG.  */
6780
 
6781
static tree
6782
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6783
{
6784
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6785
 
6786
  if (!validate_arg (arg, REAL_TYPE))
6787
    return NULL_TREE;
6788
 
6789
  /* Integer rounding functions are idempotent.  */
6790
  if (fcode == builtin_mathfn_code (arg))
6791
    return arg;
6792
 
6793
  /* If argument is already integer valued, and we don't need to worry
6794
     about setting errno, there's no need to perform rounding.  */
6795
  if (! flag_errno_math && integer_valued_real_p (arg))
6796
    return arg;
6797
 
6798
  if (optimize)
6799
    {
6800
      tree arg0 = strip_float_extensions (arg);
6801
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6802
      tree newtype = TREE_TYPE (arg0);
6803
      tree decl;
6804
 
6805
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6806
          && (decl = mathfn_built_in (newtype, fcode)))
6807
        return fold_convert_loc (loc, ftype,
6808
                                 build_call_expr_loc (loc, decl, 1,
6809
                                                  fold_convert_loc (loc,
6810
                                                                    newtype,
6811
                                                                    arg0)));
6812
    }
6813
  return NULL_TREE;
6814
}
6815
 
6816
/* FNDECL is assumed to be builtin which can narrow the FP type of
6817
   the argument, for instance lround((double)f) -> lroundf (f).
6818
   Do the transformation for a call with argument ARG.  */
6819
 
6820
static tree
6821
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6822
{
6823
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6824
 
6825
  if (!validate_arg (arg, REAL_TYPE))
6826
    return NULL_TREE;
6827
 
6828
  /* If argument is already integer valued, and we don't need to worry
6829
     about setting errno, there's no need to perform rounding.  */
6830
  if (! flag_errno_math && integer_valued_real_p (arg))
6831
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6832
                        TREE_TYPE (TREE_TYPE (fndecl)), arg);
6833
 
6834
  if (optimize)
6835
    {
6836
      tree ftype = TREE_TYPE (arg);
6837
      tree arg0 = strip_float_extensions (arg);
6838
      tree newtype = TREE_TYPE (arg0);
6839
      tree decl;
6840
 
6841
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6842
          && (decl = mathfn_built_in (newtype, fcode)))
6843
        return build_call_expr_loc (loc, decl, 1,
6844
                                fold_convert_loc (loc, newtype, arg0));
6845
    }
6846
 
6847
  /* Canonicalize llround (x) to lround (x) on LP64 targets where
6848
     sizeof (long long) == sizeof (long).  */
6849
  if (TYPE_PRECISION (long_long_integer_type_node)
6850
      == TYPE_PRECISION (long_integer_type_node))
6851
    {
6852
      tree newfn = NULL_TREE;
6853
      switch (fcode)
6854
        {
6855
        CASE_FLT_FN (BUILT_IN_LLCEIL):
6856
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6857
          break;
6858
 
6859
        CASE_FLT_FN (BUILT_IN_LLFLOOR):
6860
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6861
          break;
6862
 
6863
        CASE_FLT_FN (BUILT_IN_LLROUND):
6864
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6865
          break;
6866
 
6867
        CASE_FLT_FN (BUILT_IN_LLRINT):
6868
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6869
          break;
6870
 
6871
        default:
6872
          break;
6873
        }
6874
 
6875
      if (newfn)
6876
        {
6877
          tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6878
          return fold_convert_loc (loc,
6879
                                   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6880
        }
6881
    }
6882
 
6883
  return NULL_TREE;
6884
}
6885
 
6886
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
6887
   return type.  Return NULL_TREE if no simplification can be made.  */
6888
 
6889
static tree
6890
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6891
{
6892
  tree res;
6893
 
6894
  if (!validate_arg (arg, COMPLEX_TYPE)
6895
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6896
    return NULL_TREE;
6897
 
6898
  /* Calculate the result when the argument is a constant.  */
6899
  if (TREE_CODE (arg) == COMPLEX_CST
6900
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6901
                              type, mpfr_hypot)))
6902
    return res;
6903
 
6904
  if (TREE_CODE (arg) == COMPLEX_EXPR)
6905
    {
6906
      tree real = TREE_OPERAND (arg, 0);
6907
      tree imag = TREE_OPERAND (arg, 1);
6908
 
6909
      /* If either part is zero, cabs is fabs of the other.  */
6910
      if (real_zerop (real))
6911
        return fold_build1_loc (loc, ABS_EXPR, type, imag);
6912
      if (real_zerop (imag))
6913
        return fold_build1_loc (loc, ABS_EXPR, type, real);
6914
 
6915
      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
6916
      if (flag_unsafe_math_optimizations
6917
          && operand_equal_p (real, imag, OEP_PURE_SAME))
6918
        {
6919
          const REAL_VALUE_TYPE sqrt2_trunc
6920
            = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
6921
          STRIP_NOPS (real);
6922
          return fold_build2_loc (loc, MULT_EXPR, type,
6923
                              fold_build1_loc (loc, ABS_EXPR, type, real),
6924
                              build_real (type, sqrt2_trunc));
6925
        }
6926
    }
6927
 
6928
  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
6929
  if (TREE_CODE (arg) == NEGATE_EXPR
6930
      || TREE_CODE (arg) == CONJ_EXPR)
6931
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
6932
 
6933
  /* Don't do this when optimizing for size.  */
6934
  if (flag_unsafe_math_optimizations
6935
      && optimize && optimize_function_for_speed_p (cfun))
6936
    {
6937
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6938
 
6939
      if (sqrtfn != NULL_TREE)
6940
        {
6941
          tree rpart, ipart, result;
6942
 
6943
          arg = builtin_save_expr (arg);
6944
 
6945
          rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
6946
          ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
6947
 
6948
          rpart = builtin_save_expr (rpart);
6949
          ipart = builtin_save_expr (ipart);
6950
 
6951
          result = fold_build2_loc (loc, PLUS_EXPR, type,
6952
                                fold_build2_loc (loc, MULT_EXPR, type,
6953
                                             rpart, rpart),
6954
                                fold_build2_loc (loc, MULT_EXPR, type,
6955
                                             ipart, ipart));
6956
 
6957
          return build_call_expr_loc (loc, sqrtfn, 1, result);
6958
        }
6959
    }
6960
 
6961
  return NULL_TREE;
6962
}
6963
 
6964
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
6965
   Return NULL_TREE if no simplification can be made.  */
6966
 
6967
static tree
6968
fold_builtin_sqrt (location_t loc, tree arg, tree type)
6969
{
6970
 
6971
  enum built_in_function fcode;
6972
  tree res;
6973
 
6974
  if (!validate_arg (arg, REAL_TYPE))
6975
    return NULL_TREE;
6976
 
6977
  /* Calculate the result when the argument is a constant.  */
6978
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
6979
    return res;
6980
 
6981
  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
6982
  fcode = builtin_mathfn_code (arg);
6983
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6984
    {
6985
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6986
      arg = fold_build2_loc (loc, MULT_EXPR, type,
6987
                         CALL_EXPR_ARG (arg, 0),
6988
                         build_real (type, dconsthalf));
6989
      return build_call_expr_loc (loc, expfn, 1, arg);
6990
    }
6991
 
6992
  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
6993
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
6994
    {
6995
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6996
 
6997
      if (powfn)
6998
        {
6999
          tree arg0 = CALL_EXPR_ARG (arg, 0);
7000
          tree tree_root;
7001
          /* The inner root was either sqrt or cbrt.  */
7002
          /* This was a conditional expression but it triggered a bug
7003
             in Sun C 5.5.  */
7004
          REAL_VALUE_TYPE dconstroot;
7005
          if (BUILTIN_SQRT_P (fcode))
7006
            dconstroot = dconsthalf;
7007
          else
7008
            dconstroot = dconst_third ();
7009
 
7010
          /* Adjust for the outer root.  */
7011
          SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7012
          dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7013
          tree_root = build_real (type, dconstroot);
7014
          return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7015
        }
7016
    }
7017
 
7018
  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
7019
  if (flag_unsafe_math_optimizations
7020
      && (fcode == BUILT_IN_POW
7021
          || fcode == BUILT_IN_POWF
7022
          || fcode == BUILT_IN_POWL))
7023
    {
7024
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7025
      tree arg0 = CALL_EXPR_ARG (arg, 0);
7026
      tree arg1 = CALL_EXPR_ARG (arg, 1);
7027
      tree narg1;
7028
      if (!tree_expr_nonnegative_p (arg0))
7029
        arg0 = build1 (ABS_EXPR, type, arg0);
7030
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7031
                           build_real (type, dconsthalf));
7032
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7033
    }
7034
 
7035
  return NULL_TREE;
7036
}
7037
 
7038
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7039
   Return NULL_TREE if no simplification can be made.  */
7040
 
7041
static tree
7042
fold_builtin_cbrt (location_t loc, tree arg, tree type)
7043
{
7044
  const enum built_in_function fcode = builtin_mathfn_code (arg);
7045
  tree res;
7046
 
7047
  if (!validate_arg (arg, REAL_TYPE))
7048
    return NULL_TREE;
7049
 
7050
  /* Calculate the result when the argument is a constant.  */
7051
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7052
    return res;
7053
 
7054
  if (flag_unsafe_math_optimizations)
7055
    {
7056
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
7057
      if (BUILTIN_EXPONENT_P (fcode))
7058
        {
7059
          tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7060
          const REAL_VALUE_TYPE third_trunc =
7061
            real_value_truncate (TYPE_MODE (type), dconst_third ());
7062
          arg = fold_build2_loc (loc, MULT_EXPR, type,
7063
                             CALL_EXPR_ARG (arg, 0),
7064
                             build_real (type, third_trunc));
7065
          return build_call_expr_loc (loc, expfn, 1, arg);
7066
        }
7067
 
7068
      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
7069
      if (BUILTIN_SQRT_P (fcode))
7070
        {
7071
          tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7072
 
7073
          if (powfn)
7074
            {
7075
              tree arg0 = CALL_EXPR_ARG (arg, 0);
7076
              tree tree_root;
7077
              REAL_VALUE_TYPE dconstroot = dconst_third ();
7078
 
7079
              SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7080
              dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7081
              tree_root = build_real (type, dconstroot);
7082
              return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7083
            }
7084
        }
7085
 
7086
      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
7087
      if (BUILTIN_CBRT_P (fcode))
7088
        {
7089
          tree arg0 = CALL_EXPR_ARG (arg, 0);
7090
          if (tree_expr_nonnegative_p (arg0))
7091
            {
7092
              tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7093
 
7094
              if (powfn)
7095
                {
7096
                  tree tree_root;
7097
                  REAL_VALUE_TYPE dconstroot;
7098
 
7099
                  real_arithmetic (&dconstroot, MULT_EXPR,
7100
                                   dconst_third_ptr (), dconst_third_ptr ());
7101
                  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7102
                  tree_root = build_real (type, dconstroot);
7103
                  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7104
                }
7105
            }
7106
        }
7107
 
7108
      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
7109
      if (fcode == BUILT_IN_POW
7110
          || fcode == BUILT_IN_POWF
7111
          || fcode == BUILT_IN_POWL)
7112
        {
7113
          tree arg00 = CALL_EXPR_ARG (arg, 0);
7114
          tree arg01 = CALL_EXPR_ARG (arg, 1);
7115
          if (tree_expr_nonnegative_p (arg00))
7116
            {
7117
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7118
              const REAL_VALUE_TYPE dconstroot
7119
                = real_value_truncate (TYPE_MODE (type), dconst_third ());
7120
              tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7121
                                         build_real (type, dconstroot));
7122
              return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7123
            }
7124
        }
7125
    }
7126
  return NULL_TREE;
7127
}
7128
 
7129
/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7130
   TYPE is the type of the return value.  Return NULL_TREE if no
7131
   simplification can be made.  */
7132
 
7133
static tree
7134
fold_builtin_cos (location_t loc,
7135
                  tree arg, tree type, tree fndecl)
7136
{
7137
  tree res, narg;
7138
 
7139
  if (!validate_arg (arg, REAL_TYPE))
7140
    return NULL_TREE;
7141
 
7142
  /* Calculate the result when the argument is a constant.  */
7143
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7144
    return res;
7145
 
7146
  /* Optimize cos(-x) into cos (x).  */
7147
  if ((narg = fold_strip_sign_ops (arg)))
7148
    return build_call_expr_loc (loc, fndecl, 1, narg);
7149
 
7150
  return NULL_TREE;
7151
}
7152
 
7153
/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7154
   Return NULL_TREE if no simplification can be made.  */
7155
 
7156
static tree
7157
fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7158
{
7159
  if (validate_arg (arg, REAL_TYPE))
7160
    {
7161
      tree res, narg;
7162
 
7163
      /* Calculate the result when the argument is a constant.  */
7164
      if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7165
        return res;
7166
 
7167
      /* Optimize cosh(-x) into cosh (x).  */
7168
      if ((narg = fold_strip_sign_ops (arg)))
7169
        return build_call_expr_loc (loc, fndecl, 1, narg);
7170
    }
7171
 
7172
  return NULL_TREE;
7173
}
7174
 
7175
/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7176
   argument ARG.  TYPE is the type of the return value.  Return
7177
   NULL_TREE if no simplification can be made.  */
7178
 
7179
static tree
7180
fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7181
                   bool hyper)
7182
{
7183
  if (validate_arg (arg, COMPLEX_TYPE)
7184
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7185
    {
7186
      tree tmp;
7187
 
7188
      /* Calculate the result when the argument is a constant.  */
7189
      if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7190
        return tmp;
7191
 
7192
      /* Optimize fn(-x) into fn(x).  */
7193
      if ((tmp = fold_strip_sign_ops (arg)))
7194
        return build_call_expr_loc (loc, fndecl, 1, tmp);
7195
    }
7196
 
7197
  return NULL_TREE;
7198
}
7199
 
7200
/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7201
   Return NULL_TREE if no simplification can be made.  */
7202
 
7203
static tree
7204
fold_builtin_tan (tree arg, tree type)
7205
{
7206
  enum built_in_function fcode;
7207
  tree res;
7208
 
7209
  if (!validate_arg (arg, REAL_TYPE))
7210
    return NULL_TREE;
7211
 
7212
  /* Calculate the result when the argument is a constant.  */
7213
  if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7214
    return res;
7215
 
7216
  /* Optimize tan(atan(x)) = x.  */
7217
  fcode = builtin_mathfn_code (arg);
7218
  if (flag_unsafe_math_optimizations
7219
      && (fcode == BUILT_IN_ATAN
7220
          || fcode == BUILT_IN_ATANF
7221
          || fcode == BUILT_IN_ATANL))
7222
    return CALL_EXPR_ARG (arg, 0);
7223
 
7224
  return NULL_TREE;
7225
}
7226
 
7227
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
7228
   NULL_TREE if no simplification can be made.  */
7229
 
7230
static tree
7231
fold_builtin_sincos (location_t loc,
7232
                     tree arg0, tree arg1, tree arg2)
7233
{
7234
  tree type;
7235
  tree res, fn, call;
7236
 
7237
  if (!validate_arg (arg0, REAL_TYPE)
7238
      || !validate_arg (arg1, POINTER_TYPE)
7239
      || !validate_arg (arg2, POINTER_TYPE))
7240
    return NULL_TREE;
7241
 
7242
  type = TREE_TYPE (arg0);
7243
 
7244
  /* Calculate the result when the argument is a constant.  */
7245
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7246
    return res;
7247
 
7248
  /* Canonicalize sincos to cexpi.  */
7249
  if (!TARGET_C99_FUNCTIONS)
7250
    return NULL_TREE;
7251
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7252
  if (!fn)
7253
    return NULL_TREE;
7254
 
7255
  call = build_call_expr_loc (loc, fn, 1, arg0);
7256
  call = builtin_save_expr (call);
7257
 
7258
  return build2 (COMPOUND_EXPR, void_type_node,
7259
                 build2 (MODIFY_EXPR, void_type_node,
7260
                         build_fold_indirect_ref_loc (loc, arg1),
7261
                         build1 (IMAGPART_EXPR, type, call)),
7262
                 build2 (MODIFY_EXPR, void_type_node,
7263
                         build_fold_indirect_ref_loc (loc, arg2),
7264
                         build1 (REALPART_EXPR, type, call)));
7265
}
7266
 
7267
/* Fold function call to builtin cexp, cexpf, or cexpl.  Return
7268
   NULL_TREE if no simplification can be made.  */
7269
 
7270
static tree
7271
fold_builtin_cexp (location_t loc, tree arg0, tree type)
7272
{
7273
  tree rtype;
7274
  tree realp, imagp, ifn;
7275
  tree res;
7276
 
7277
  if (!validate_arg (arg0, COMPLEX_TYPE)
7278
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7279
    return NULL_TREE;
7280
 
7281
  /* Calculate the result when the argument is a constant.  */
7282
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7283
    return res;
7284
 
7285
  rtype = TREE_TYPE (TREE_TYPE (arg0));
7286
 
7287
  /* In case we can figure out the real part of arg0 and it is constant zero
7288
     fold to cexpi.  */
7289
  if (!TARGET_C99_FUNCTIONS)
7290
    return NULL_TREE;
7291
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7292
  if (!ifn)
7293
    return NULL_TREE;
7294
 
7295
  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7296
      && real_zerop (realp))
7297
    {
7298
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7299
      return build_call_expr_loc (loc, ifn, 1, narg);
7300
    }
7301
 
7302
  /* In case we can easily decompose real and imaginary parts split cexp
7303
     to exp (r) * cexpi (i).  */
7304
  if (flag_unsafe_math_optimizations
7305
      && realp)
7306
    {
7307
      tree rfn, rcall, icall;
7308
 
7309
      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7310
      if (!rfn)
7311
        return NULL_TREE;
7312
 
7313
      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7314
      if (!imagp)
7315
        return NULL_TREE;
7316
 
7317
      icall = build_call_expr_loc (loc, ifn, 1, imagp);
7318
      icall = builtin_save_expr (icall);
7319
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
7320
      rcall = builtin_save_expr (rcall);
7321
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
7322
                          fold_build2_loc (loc, MULT_EXPR, rtype,
7323
                                       rcall,
7324
                                       fold_build1_loc (loc, REALPART_EXPR,
7325
                                                    rtype, icall)),
7326
                          fold_build2_loc (loc, MULT_EXPR, rtype,
7327
                                       rcall,
7328
                                       fold_build1_loc (loc, IMAGPART_EXPR,
7329
                                                    rtype, icall)));
7330
    }
7331
 
7332
  return NULL_TREE;
7333
}
7334
 
7335
/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7336
   Return NULL_TREE if no simplification can be made.  */
7337
 
7338
static tree
7339
fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7340
{
7341
  if (!validate_arg (arg, REAL_TYPE))
7342
    return NULL_TREE;
7343
 
7344
  /* Optimize trunc of constant value.  */
7345
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7346
    {
7347
      REAL_VALUE_TYPE r, x;
7348
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
7349
 
7350
      x = TREE_REAL_CST (arg);
7351
      real_trunc (&r, TYPE_MODE (type), &x);
7352
      return build_real (type, r);
7353
    }
7354
 
7355
  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7356
}
7357
 
7358
/* Fold function call to builtin floor, floorf or floorl with argument ARG.
7359
   Return NULL_TREE if no simplification can be made.  */
7360
 
7361
static tree
7362
fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7363
{
7364
  if (!validate_arg (arg, REAL_TYPE))
7365
    return NULL_TREE;
7366
 
7367
  /* Optimize floor of constant value.  */
7368
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7369
    {
7370
      REAL_VALUE_TYPE x;
7371
 
7372
      x = TREE_REAL_CST (arg);
7373
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7374
        {
7375
          tree type = TREE_TYPE (TREE_TYPE (fndecl));
7376
          REAL_VALUE_TYPE r;
7377
 
7378
          real_floor (&r, TYPE_MODE (type), &x);
7379
          return build_real (type, r);
7380
        }
7381
    }
7382
 
7383
  /* Fold floor (x) where x is nonnegative to trunc (x).  */
7384
  if (tree_expr_nonnegative_p (arg))
7385
    {
7386
      tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7387
      if (truncfn)
7388
        return build_call_expr_loc (loc, truncfn, 1, arg);
7389
    }
7390
 
7391
  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7392
}
7393
 
7394
/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7395
   Return NULL_TREE if no simplification can be made.  */
7396
 
7397
static tree
7398
fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7399
{
7400
  if (!validate_arg (arg, REAL_TYPE))
7401
    return NULL_TREE;
7402
 
7403
  /* Optimize ceil of constant value.  */
7404
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7405
    {
7406
      REAL_VALUE_TYPE x;
7407
 
7408
      x = TREE_REAL_CST (arg);
7409
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7410
        {
7411
          tree type = TREE_TYPE (TREE_TYPE (fndecl));
7412
          REAL_VALUE_TYPE r;
7413
 
7414
          real_ceil (&r, TYPE_MODE (type), &x);
7415
          return build_real (type, r);
7416
        }
7417
    }
7418
 
7419
  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7420
}
7421
 
7422
/* Fold function call to builtin round, roundf or roundl with argument ARG.
7423
   Return NULL_TREE if no simplification can be made.  */
7424
 
7425
static tree
7426
fold_builtin_round (location_t loc, tree fndecl, tree arg)
7427
{
7428
  if (!validate_arg (arg, REAL_TYPE))
7429
    return NULL_TREE;
7430
 
7431
  /* Optimize round of constant value.  */
7432
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7433
    {
7434
      REAL_VALUE_TYPE x;
7435
 
7436
      x = TREE_REAL_CST (arg);
7437
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7438
        {
7439
          tree type = TREE_TYPE (TREE_TYPE (fndecl));
7440
          REAL_VALUE_TYPE r;
7441
 
7442
          real_round (&r, TYPE_MODE (type), &x);
7443
          return build_real (type, r);
7444
        }
7445
    }
7446
 
7447
  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7448
}
7449
 
7450
/* Fold function call to builtin lround, lroundf or lroundl (or the
7451
   corresponding long long versions) and other rounding functions.  ARG
7452
   is the argument to the call.  Return NULL_TREE if no simplification
7453
   can be made.  */
7454
 
7455
static tree
7456
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7457
{
7458
  if (!validate_arg (arg, REAL_TYPE))
7459
    return NULL_TREE;
7460
 
7461
  /* Optimize lround of constant value.  */
7462
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7463
    {
7464
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7465
 
7466
      if (real_isfinite (&x))
7467
        {
7468
          tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7469
          tree ftype = TREE_TYPE (arg);
7470
          unsigned HOST_WIDE_INT lo2;
7471
          HOST_WIDE_INT hi, lo;
7472
          REAL_VALUE_TYPE r;
7473
 
7474
          switch (DECL_FUNCTION_CODE (fndecl))
7475
            {
7476
            CASE_FLT_FN (BUILT_IN_LFLOOR):
7477
            CASE_FLT_FN (BUILT_IN_LLFLOOR):
7478
              real_floor (&r, TYPE_MODE (ftype), &x);
7479
              break;
7480
 
7481
            CASE_FLT_FN (BUILT_IN_LCEIL):
7482
            CASE_FLT_FN (BUILT_IN_LLCEIL):
7483
              real_ceil (&r, TYPE_MODE (ftype), &x);
7484
              break;
7485
 
7486
            CASE_FLT_FN (BUILT_IN_LROUND):
7487
            CASE_FLT_FN (BUILT_IN_LLROUND):
7488
              real_round (&r, TYPE_MODE (ftype), &x);
7489
              break;
7490
 
7491
            default:
7492
              gcc_unreachable ();
7493
            }
7494
 
7495
          REAL_VALUE_TO_INT (&lo, &hi, r);
7496
          if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7497
            return build_int_cst_wide (itype, lo2, hi);
7498
        }
7499
    }
7500
 
7501
  switch (DECL_FUNCTION_CODE (fndecl))
7502
    {
7503
    CASE_FLT_FN (BUILT_IN_LFLOOR):
7504
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
7505
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
7506
      if (tree_expr_nonnegative_p (arg))
7507
        return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7508
                            TREE_TYPE (TREE_TYPE (fndecl)), arg);
7509
      break;
7510
    default:;
7511
    }
7512
 
7513
  return fold_fixed_mathfn (loc, fndecl, arg);
7514
}
7515
 
7516
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
7517
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
7518
   the argument to the call.  Return NULL_TREE if no simplification can
7519
   be made.  */
7520
 
7521
static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  The constant is held as a pair of
     host words: LO (low HOST_BITS_PER_WIDE_INT bits) and HI (the rest).  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision, so that
         sign-extension bits in the INTEGER_CST representation do not
         perturb the popcount/clz/... computations below.  */
      if (width > HOST_BITS_PER_WIDE_INT)
        {
          hi = TREE_INT_CST_HIGH (arg);
          if (width < 2 * HOST_BITS_PER_WIDE_INT)
            hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
        }
      else
        {
          hi = 0;
          if (width < HOST_BITS_PER_WIDE_INT)
            lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
        }

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        CASE_INT_FN (BUILT_IN_FFS):
          /* ffs: 1-based index of the least significant set bit, 0 if none.
             X & -X isolates the lowest set bit.  */
          if (lo != 0)
            result = exact_log2 (lo & -lo) + 1;
          else if (hi != 0)
            result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
          else
            result = 0;
          break;

        CASE_INT_FN (BUILT_IN_CLZ):
          /* clz: leading zeros above the most significant set bit.  When
             the value is 0, use the target-defined value if there is one
             (the macro stores it into RESULT), else fall back to WIDTH.  */
          if (hi != 0)
            result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
          else if (lo != 0)
            result = width - floor_log2 (lo) - 1;
          else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
            result = width;
          break;

        CASE_INT_FN (BUILT_IN_CTZ):
          /* ctz: trailing zeros below the least significant set bit; the
             zero-input case mirrors CLZ above.  */
          if (lo != 0)
            result = exact_log2 (lo & -lo);
          else if (hi != 0)
            result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
          else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
            result = width;
          break;

        CASE_INT_FN (BUILT_IN_POPCOUNT):
          /* popcount via Kernighan's trick: X &= X - 1 clears the lowest
             set bit, so the loop iterates once per set bit.  */
          result = 0;
          while (lo)
            result++, lo &= lo - 1;
          while (hi)
            result++, hi &= hi - 1;
          break;

        CASE_INT_FN (BUILT_IN_PARITY):
          /* parity: popcount reduced modulo 2.  */
          result = 0;
          while (lo)
            result++, lo &= lo - 1;
          while (hi)
            result++, hi &= hi - 1;
          result &= 1;
          break;

        default:
          gcc_unreachable ();
        }

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
7607
 
7608
/* Fold function call to builtin_bswap and the long and long long
7609
   variants.  Return NULL_TREE if no simplification can be made.  */
7610
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* The input constant lives in the LO/HI host-word pair; the
         byte-reversed result is accumulated into R_LO/R_HI.  */
      HOST_WIDE_INT hi, width, r_hi = 0;
      unsigned HOST_WIDE_INT lo, r_lo = 0;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);
      hi = TREE_INT_CST_HIGH (arg);

      switch (DECL_FUNCTION_CODE (fndecl))
        {
          case BUILT_IN_BSWAP32:
          case BUILT_IN_BSWAP64:
            {
              int s;

              /* Move the byte at bit offset S to the mirrored offset D,
                 picking the source word (lo/hi) and destination word by
                 whether the offset fits in one host word.  */
              for (s = 0; s < width; s += 8)
                {
                  int d = width - s - 8;
                  unsigned HOST_WIDE_INT byte;

                  if (s < HOST_BITS_PER_WIDE_INT)
                    byte = (lo >> s) & 0xff;
                  else
                    byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;

                  if (d < HOST_BITS_PER_WIDE_INT)
                    r_lo |= byte << d;
                  else
                    r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
                }
            }

            break;

        default:
          gcc_unreachable ();
        }

      /* A result narrower than a host word fits entirely in R_LO.  */
      if (width < HOST_BITS_PER_WIDE_INT)
        return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
      else
        return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
    }

  return NULL_TREE;
}
7666
 
7667
/* A subroutine of fold_builtin to fold the various logarithmic
7668
   functions.  Return NULL_TREE if no simplification can me made.
7669
   FUNC is the corresponding MPFR logarithm function.  */
7670
 
7671
static tree
fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
                        int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      /* If ARG is itself a call to a math builtin, FCODE identifies it;
         the inner call is the basis of the log(exp)/log(pow) folds.  */
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Calculate the result when the argument is a constant.  The
         &dconst0 lower bound restricts folding to arguments >= 0.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
        return res;

      /* Special case, optimize logN(expN(x)) = x.  FUNC identifies which
         logarithm this is, so only the matching exponential cancels.  */
      if (flag_unsafe_math_optimizations
          && ((func == mpfr_log
               && (fcode == BUILT_IN_EXP
                   || fcode == BUILT_IN_EXPF
                   || fcode == BUILT_IN_EXPL))
              || (func == mpfr_log2
                  && (fcode == BUILT_IN_EXP2
                      || fcode == BUILT_IN_EXP2F
                      || fcode == BUILT_IN_EXP2L))
              || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
        return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
         want to determine the value "x" and the power "exponent" in
         order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
        {
          tree exponent = 0, x = 0;

          switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_EXP):
            /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
            x = build_real (type, real_value_truncate (TYPE_MODE (type),
                                                       dconst_e ()));
            exponent = CALL_EXPR_ARG (arg, 0);
            break;
          CASE_FLT_FN (BUILT_IN_EXP2):
            /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
            x = build_real (type, dconst2);
            exponent = CALL_EXPR_ARG (arg, 0);
            break;
          CASE_FLT_FN (BUILT_IN_EXP10):
          CASE_FLT_FN (BUILT_IN_POW10):
            /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
            {
              REAL_VALUE_TYPE dconst10;
              real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
              x = build_real (type, dconst10);
            }
            exponent = CALL_EXPR_ARG (arg, 0);
            break;
          CASE_FLT_FN (BUILT_IN_SQRT):
            /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
            x = CALL_EXPR_ARG (arg, 0);
            exponent = build_real (type, dconsthalf);
            break;
          CASE_FLT_FN (BUILT_IN_CBRT):
            /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
            x = CALL_EXPR_ARG (arg, 0);
            exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
                                                              dconst_third ()));
            break;
          CASE_FLT_FN (BUILT_IN_POW):
            /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
            x = CALL_EXPR_ARG (arg, 0);
            exponent = CALL_EXPR_ARG (arg, 1);
            break;
          default:
            break;
          }

          /* Now perform the optimization.  */
          if (x && exponent)
            {
              tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
              return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
            }
        }
    }

  return NULL_TREE;
}
7759
 
7760
/* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
7761
   NULL_TREE if no simplification can be made.  */
7762
 
7763
static tree
7764
fold_builtin_hypot (location_t loc, tree fndecl,
7765
                    tree arg0, tree arg1, tree type)
7766
{
7767
  tree res, narg0, narg1;
7768
 
7769
  if (!validate_arg (arg0, REAL_TYPE)
7770
      || !validate_arg (arg1, REAL_TYPE))
7771
    return NULL_TREE;
7772
 
7773
  /* Calculate the result when the argument is a constant.  */
7774
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7775
    return res;
7776
 
7777
  /* If either argument to hypot has a negate or abs, strip that off.
7778
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
7779
  narg0 = fold_strip_sign_ops (arg0);
7780
  narg1 = fold_strip_sign_ops (arg1);
7781
  if (narg0 || narg1)
7782
    {
7783
      return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7784
                              narg1 ? narg1 : arg1);
7785
    }
7786
 
7787
  /* If either argument is zero, hypot is fabs of the other.  */
7788
  if (real_zerop (arg0))
7789
    return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7790
  else if (real_zerop (arg1))
7791
    return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7792
 
7793
  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
7794
  if (flag_unsafe_math_optimizations
7795
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7796
    {
7797
      const REAL_VALUE_TYPE sqrt2_trunc
7798
        = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7799
      return fold_build2_loc (loc, MULT_EXPR, type,
7800
                          fold_build1_loc (loc, ABS_EXPR, type, arg0),
7801
                          build_real (type, sqrt2_trunc));
7802
    }
7803
 
7804
  return NULL_TREE;
7805
}
7806
 
7807
 
7808
/* Fold a builtin function call to pow, powf, or powl.  Return
7809
   NULL_TREE if no simplification can be made.  */
7810
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
       || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  /* The folds below all depend on the exponent being a non-overflowed
     real constant C.  */
  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
        return omit_one_operand_loc (loc, type, build_real (type, dconst1),
                                 arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
        return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                            build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  Unsafe only: differs for x = -0.0
         and x = -Inf.  */
      if (flag_unsafe_math_optimizations
          && REAL_VALUES_EQUAL (c, dconsthalf))
        {
          tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

          if (sqrtfn != NULL_TREE)
            return build_call_expr_loc (loc, sqrtfn, 1, arg0);
        }

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  The comparison is against
         1/3 rounded to TYPE's precision, matching what the user wrote.  */
      if (flag_unsafe_math_optimizations)
        {
          const REAL_VALUE_TYPE dconstroot
            = real_value_truncate (TYPE_MODE (type), dconst_third ());

          if (REAL_VALUES_EQUAL (c, dconstroot))
            {
              tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
              if (cbrtfn != NULL_TREE)
                return build_call_expr_loc (loc, cbrtfn, 1, arg0);
            }
        }

      /* Check for an integer exponent: round-trip C through an integer
         and see whether the value is unchanged.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
        {
          /* Attempt to evaluate pow at compile-time, unless this should
             raise an exception.  pow(0, negative) may raise divide-by-zero
             and set errno, so only fold that case when neither trapping
             math nor errno-setting matters.  */
          if (TREE_CODE (arg0) == REAL_CST
              && !TREE_OVERFLOW (arg0)
              && (n > 0
                  || (!flag_trapping_math && !flag_errno_math)
                  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
            {
              REAL_VALUE_TYPE x;
              bool inexact;

              x = TREE_REAL_CST (arg0);
              inexact = real_powi (&x, TYPE_MODE (type), &x, n);
              /* An inexact result is only acceptable under unsafe math.  */
              if (flag_unsafe_math_optimizations || !inexact)
                return build_real (type, x);
            }

          /* Strip sign ops from even integer powers: the result cannot
             depend on the sign of the base.  */
          if ((n & 1) == 0 && flag_unsafe_math_optimizations)
            {
              tree narg0 = fold_strip_sign_ops (arg0);
              if (narg0)
                return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
            }
        }
    }

  /* Folds keyed on the form of the base rather than the exponent.  */
  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
        {
          tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
          tree arg = CALL_EXPR_ARG (arg0, 0);
          arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
          return build_call_expr_loc (loc, expfn, 1, arg);
        }

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
        {
          tree narg0 = CALL_EXPR_ARG (arg0, 0);
          tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
                                    build_real (type, dconsthalf));
          return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
        }

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
        {
          tree arg = CALL_EXPR_ARG (arg0, 0);
          if (tree_expr_nonnegative_p (arg))
            {
              const REAL_VALUE_TYPE dconstroot
                = real_value_truncate (TYPE_MODE (type), dconst_third ());
              tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
                                        build_real (type, dconstroot));
              return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
            }
        }

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
          || fcode == BUILT_IN_POWF
          || fcode == BUILT_IN_POWL)
        {
          tree arg00 = CALL_EXPR_ARG (arg0, 0);
          if (tree_expr_nonnegative_p (arg00))
            {
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
              return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
            }
        }
    }

  return NULL_TREE;
}
7959
 
7960
/* Fold a builtin function call to powi, powif, or powil with argument ARG.
7961
   Return NULL_TREE if no simplification can be made.  */
7962
static tree
7963
fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7964
                   tree arg0, tree arg1, tree type)
7965
{
7966
  if (!validate_arg (arg0, REAL_TYPE)
7967
      || !validate_arg (arg1, INTEGER_TYPE))
7968
    return NULL_TREE;
7969
 
7970
  /* Optimize pow(1.0,y) = 1.0.  */
7971
  if (real_onep (arg0))
7972
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7973
 
7974
  if (host_integerp (arg1, 0))
7975
    {
7976
      HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7977
 
7978
      /* Evaluate powi at compile-time.  */
7979
      if (TREE_CODE (arg0) == REAL_CST
7980
          && !TREE_OVERFLOW (arg0))
7981
        {
7982
          REAL_VALUE_TYPE x;
7983
          x = TREE_REAL_CST (arg0);
7984
          real_powi (&x, TYPE_MODE (type), &x, c);
7985
          return build_real (type, x);
7986
        }
7987
 
7988
      /* Optimize pow(x,0) = 1.0.  */
7989
      if (c == 0)
7990
        return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7991
                                 arg0);
7992
 
7993
      /* Optimize pow(x,1) = x.  */
7994
      if (c == 1)
7995
        return arg0;
7996
 
7997
      /* Optimize pow(x,-1) = 1.0/x.  */
7998
      if (c == -1)
7999
        return fold_build2_loc (loc, RDIV_EXPR, type,
8000
                           build_real (type, dconst1), arg0);
8001
    }
8002
 
8003
  return NULL_TREE;
8004
}
8005
 
8006
/* A subroutine of fold_builtin to fold the various exponent
8007
   functions.  Return NULL_TREE if no simplification can be made.
8008
   FUNC is the corresponding MPFR exponent function.  */
8009
 
8010
static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
                       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
        return res;

      /* Optimize expN(logN(x)) = x.  FUNC identifies which exponential
         this call is, so only its matching logarithm cancels.  */
      if (flag_unsafe_math_optimizations)
        {
          /* If ARG is itself a math-builtin call, this is its code.  */
          const enum built_in_function fcode = builtin_mathfn_code (arg);

          if ((func == mpfr_exp
               && (fcode == BUILT_IN_LOG
                   || fcode == BUILT_IN_LOGF
                   || fcode == BUILT_IN_LOGL))
              || (func == mpfr_exp2
                  && (fcode == BUILT_IN_LOG2
                      || fcode == BUILT_IN_LOG2F
                      || fcode == BUILT_IN_LOG2L))
              || (func == mpfr_exp10
                  && (fcode == BUILT_IN_LOG10
                      || fcode == BUILT_IN_LOG10F
                      || fcode == BUILT_IN_LOG10L)))
            return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
        }
    }

  return NULL_TREE;
}
8046
 
8047
/* Return true if VAR is a VAR_DECL or a component thereof.  */
8048
 
8049
static bool
8050
var_decl_component_p (tree var)
8051
{
8052
  tree inner = var;
8053
  while (handled_component_p (inner))
8054
    inner = TREE_OPERAND (inner, 0);
8055
  return SSA_VAR_P (inner);
8056
}
8057
 
8058
/* Fold function call to builtin memset.  Return
8059
   NULL_TREE if no simplification can be made.  */
8060
 
8061
static tree
fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
                     tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* Only a nonnegative constant length can be handled.  */
  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, c);

  /* The fill value must be a constant, and DEST must be evaluable more
     than once without side effects.  */
  if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* The transformation below rewrites the memset as a single scalar
     store, so DEST must be the address of a non-volatile object.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  /* For an array destination, consider its element type.  */
  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The store must cover the object exactly and be sufficiently
     aligned for ETYPE's mode.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
         < (int) length)
    return NULL_TREE;

  /* The replicated fill pattern must fit in one host wide int.  */
  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      /* Byte replication below assumes 8-bit bytes and at most a 64-bit
         host word.  */
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return NULL_TREE;

      /* Replicate the low byte of C across the whole word; the final
         double shift avoids an out-of-range shift by 32 when the host
         wide int is only 32 bits.  */
      cval = tree_low_cst (c, 1);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Build *(etype *) dest = cval.  */
  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref_loc (loc,
                                 fold_convert_loc (loc,
                                                   build_pointer_type (etype),
                                                   dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  /* The caller uses the result, so yield DEST after the store.  */
  return omit_one_operand_loc (loc, type, dest, ret);
}
8137
 
8138
/* Fold function call to builtin memset.  Return
8139
   NULL_TREE if no simplification can be made.  */
8140
 
8141
static tree
8142
fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8143
{
8144
  if (! validate_arg (dest, POINTER_TYPE)
8145
      || ! validate_arg (size, INTEGER_TYPE))
8146
    return NULL_TREE;
8147
 
8148
  if (!ignore)
8149
    return NULL_TREE;
8150
 
8151
  /* New argument list transforming bzero(ptr x, int y) to
8152
     memset(ptr x, int 0, size_t y).   This is done this way
8153
     so that if it isn't expanded inline, we fallback to
8154
     calling bzero instead of memset.  */
8155
 
8156
  return fold_builtin_memset (loc, dest, integer_zero_node,
8157
                              fold_convert_loc (loc, sizetype, size),
8158
                              void_type_node, ignore);
8159
}
8160
 
8161
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
8162
   NULL_TREE if no simplification can be made.
8163
   If ENDP is 0, return DEST (like memcpy).
8164
   If ENDP is 1, return DEST+LEN (like mempcpy).
8165
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8166
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8167
   (memmove).   */
8168
 
8169
static tree
8170
fold_builtin_memory_op (location_t loc, tree dest, tree src,
8171
                        tree len, tree type, bool ignore, int endp)
8172
{
8173
  tree destvar, srcvar, expr;
8174
 
8175
  if (! validate_arg (dest, POINTER_TYPE)
8176
      || ! validate_arg (src, POINTER_TYPE)
8177
      || ! validate_arg (len, INTEGER_TYPE))
8178
    return NULL_TREE;
8179
 
8180
  /* If the LEN parameter is zero, return DEST.  */
8181
  if (integer_zerop (len))
8182
    return omit_one_operand_loc (loc, type, dest, src);
8183
 
8184
  /* If SRC and DEST are the same (and not volatile), return
8185
     DEST{,+LEN,+LEN-1}.  */
8186
  if (operand_equal_p (src, dest, 0))
8187
    expr = len;
8188
  else
8189
    {
8190
      tree srctype, desttype;
8191
      int src_align, dest_align;
8192
 
8193
      if (endp == 3)
8194
        {
8195
          src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8196
          dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8197
 
8198
          /* Both DEST and SRC must be pointer types.
8199
             ??? This is what old code did.  Is the testing for pointer types
8200
             really mandatory?
8201
 
8202
             If either SRC is readonly or length is 1, we can use memcpy.  */
8203
          if (!dest_align || !src_align)
8204
            return NULL_TREE;
8205
          if (readonly_data_expr (src)
8206
              || (host_integerp (len, 1)
8207
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
8208
                      >= tree_low_cst (len, 1))))
8209
            {
8210
              tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8211
              if (!fn)
8212
                return NULL_TREE;
8213
              return build_call_expr_loc (loc, fn, 3, dest, src, len);
8214
            }
8215
 
8216
          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
8217
          srcvar = build_fold_indirect_ref_loc (loc, src);
8218
          destvar = build_fold_indirect_ref_loc (loc, dest);
8219
          if (srcvar
8220
              && !TREE_THIS_VOLATILE (srcvar)
8221
              && destvar
8222
              && !TREE_THIS_VOLATILE (destvar))
8223
            {
8224
              tree src_base, dest_base, fn;
8225
              HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8226
              HOST_WIDE_INT size = -1;
8227
              HOST_WIDE_INT maxsize = -1;
8228
 
8229
              src_base = srcvar;
8230
              if (handled_component_p (src_base))
8231
                src_base = get_ref_base_and_extent (src_base, &src_offset,
8232
                                                    &size, &maxsize);
8233
              dest_base = destvar;
8234
              if (handled_component_p (dest_base))
8235
                dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8236
                                                     &size, &maxsize);
8237
              if (host_integerp (len, 1))
8238
                {
8239
                  maxsize = tree_low_cst (len, 1);
8240
                  if (maxsize
8241
                      > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8242
                    maxsize = -1;
8243
                  else
8244
                    maxsize *= BITS_PER_UNIT;
8245
                }
8246
              else
8247
                maxsize = -1;
8248
              if (SSA_VAR_P (src_base)
8249
                  && SSA_VAR_P (dest_base))
8250
                {
8251
                  if (operand_equal_p (src_base, dest_base, 0)
8252
                      && ranges_overlap_p (src_offset, maxsize,
8253
                                           dest_offset, maxsize))
8254
                    return NULL_TREE;
8255
                }
8256
              else if (TREE_CODE (src_base) == INDIRECT_REF
8257
                       && TREE_CODE (dest_base) == INDIRECT_REF)
8258
                {
8259
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8260
                                         TREE_OPERAND (dest_base, 0), 0)
8261
                      || ranges_overlap_p (src_offset, maxsize,
8262
                                           dest_offset, maxsize))
8263
                    return NULL_TREE;
8264
                }
8265
              else
8266
                return NULL_TREE;
8267
 
8268
              fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8269
              if (!fn)
8270
                return NULL_TREE;
8271
              return build_call_expr_loc (loc, fn, 3, dest, src, len);
8272
            }
8273
          return NULL_TREE;
8274
        }
8275
 
8276
      if (!host_integerp (len, 0))
8277
        return NULL_TREE;
8278
      /* FIXME:
8279
         This logic lose for arguments like (type *)malloc (sizeof (type)),
8280
         since we strip the casts of up to VOID return value from malloc.
8281
         Perhaps we ought to inherit type from non-VOID argument here?  */
8282
      STRIP_NOPS (src);
8283
      STRIP_NOPS (dest);
8284
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
8285
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8286
        {
8287
          tree tem = TREE_OPERAND (src, 0);
8288
          STRIP_NOPS (tem);
8289
          if (tem != TREE_OPERAND (src, 0))
8290
            src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8291
        }
8292
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8293
        {
8294
          tree tem = TREE_OPERAND (dest, 0);
8295
          STRIP_NOPS (tem);
8296
          if (tem != TREE_OPERAND (dest, 0))
8297
            dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8298
        }
8299
      srctype = TREE_TYPE (TREE_TYPE (src));
8300
      if (srctype
8301
          && TREE_CODE (srctype) == ARRAY_TYPE
8302
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8303
        {
8304
          srctype = TREE_TYPE (srctype);
8305
          STRIP_NOPS (src);
8306
          src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8307
        }
8308
      desttype = TREE_TYPE (TREE_TYPE (dest));
8309
      if (desttype
8310
          && TREE_CODE (desttype) == ARRAY_TYPE
8311
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8312
        {
8313
          desttype = TREE_TYPE (desttype);
8314
          STRIP_NOPS (dest);
8315
          dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8316
        }
8317
      if (!srctype || !desttype
8318
          || !TYPE_SIZE_UNIT (srctype)
8319
          || !TYPE_SIZE_UNIT (desttype)
8320
          || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8321
          || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8322
          || TYPE_VOLATILE (srctype)
8323
          || TYPE_VOLATILE (desttype))
8324
        return NULL_TREE;
8325
 
8326
      src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8327
      dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8328
      if (dest_align < (int) TYPE_ALIGN (desttype)
8329
          || src_align < (int) TYPE_ALIGN (srctype))
8330
        return NULL_TREE;
8331
 
8332
      if (!ignore)
8333
        dest = builtin_save_expr (dest);
8334
 
8335
      srcvar = NULL_TREE;
8336
      if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8337
        {
8338
          srcvar = build_fold_indirect_ref_loc (loc, src);
8339
          if (TREE_THIS_VOLATILE (srcvar))
8340
            return NULL_TREE;
8341
          else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
8342
            srcvar = NULL_TREE;
8343
          /* With memcpy, it is possible to bypass aliasing rules, so without
8344
             this check i.e. execute/20060930-2.c would be misoptimized,
8345
             because it use conflicting alias set to hold argument for the
8346
             memcpy call.  This check is probably unnecessary with
8347
             -fno-strict-aliasing.  Similarly for destvar.  See also
8348
             PR29286.  */
8349
          else if (!var_decl_component_p (srcvar))
8350
            srcvar = NULL_TREE;
8351
        }
8352
 
8353
      destvar = NULL_TREE;
8354
      if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8355
        {
8356
          destvar = build_fold_indirect_ref_loc (loc, dest);
8357
          if (TREE_THIS_VOLATILE (destvar))
8358
            return NULL_TREE;
8359
          else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
8360
            destvar = NULL_TREE;
8361
          else if (!var_decl_component_p (destvar))
8362
            destvar = NULL_TREE;
8363
        }
8364
 
8365
      if (srcvar == NULL_TREE && destvar == NULL_TREE)
8366
        return NULL_TREE;
8367
 
8368
      if (srcvar == NULL_TREE)
8369
        {
8370
          tree srcptype;
8371
          if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8372
            return NULL_TREE;
8373
 
8374
          srctype = build_qualified_type (desttype, 0);
8375
          if (src_align < (int) TYPE_ALIGN (srctype))
8376
            {
8377
              if (AGGREGATE_TYPE_P (srctype)
8378
                  || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8379
                return NULL_TREE;
8380
 
8381
              srctype = build_variant_type_copy (srctype);
8382
              TYPE_ALIGN (srctype) = src_align;
8383
              TYPE_USER_ALIGN (srctype) = 1;
8384
              TYPE_PACKED (srctype) = 1;
8385
            }
8386
          srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8387
          src = fold_convert_loc (loc, srcptype, src);
8388
          srcvar = build_fold_indirect_ref_loc (loc, src);
8389
        }
8390
      else if (destvar == NULL_TREE)
8391
        {
8392
          tree destptype;
8393
          if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8394
            return NULL_TREE;
8395
 
8396
          desttype = build_qualified_type (srctype, 0);
8397
          if (dest_align < (int) TYPE_ALIGN (desttype))
8398
            {
8399
              if (AGGREGATE_TYPE_P (desttype)
8400
                  || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8401
                return NULL_TREE;
8402
 
8403
              desttype = build_variant_type_copy (desttype);
8404
              TYPE_ALIGN (desttype) = dest_align;
8405
              TYPE_USER_ALIGN (desttype) = 1;
8406
              TYPE_PACKED (desttype) = 1;
8407
            }
8408
          destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
8409
          dest = fold_convert_loc (loc, destptype, dest);
8410
          destvar = build_fold_indirect_ref_loc (loc, dest);
8411
        }
8412
 
8413
      if (srctype == desttype
8414
          || (gimple_in_ssa_p (cfun)
8415
              && useless_type_conversion_p (desttype, srctype)))
8416
        expr = srcvar;
8417
      else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8418
           || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8419
          && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8420
              || POINTER_TYPE_P (TREE_TYPE (destvar))))
8421
        expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
8422
      else
8423
        expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8424
                            TREE_TYPE (destvar), srcvar);
8425
      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8426
    }
8427
 
8428
  if (ignore)
8429
    return expr;
8430
 
8431
  if (endp == 0 || endp == 3)
8432
    return omit_one_operand_loc (loc, type, dest, expr);
8433
 
8434
  if (expr == len)
8435
    expr = NULL_TREE;
8436
 
8437
  if (endp == 2)
8438
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8439
                       ssize_int (1));
8440
 
8441
  len = fold_convert_loc (loc, sizetype, len);
8442
  dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8443
  dest = fold_convert_loc (loc, type, dest);
8444
  if (expr)
8445
    dest = omit_one_operand_loc (loc, type, dest, expr);
8446
  return dest;
8447
}
8448
 
8449
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  FNDECL supplies the return type of the strcpy call.
   Return NULL_TREE if no simplification can be made.  */

tree
fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* The memcpy transformation below can enlarge code; only do it when
     not optimizing this function for size.  */
  if (optimize_function_for_size_p (cfun))
    return NULL_TREE;

  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
  if (!fn)
    return NULL_TREE;

  /* Without a known LEN, try to compute the source length at compile
     time; give up if it has side effects (it would be evaluated twice
     conceptually: once for the length, once inside memcpy).  */
  if (!len)
    {
      len = c_strlen (src, 1);
      if (! len || TREE_SIDE_EFFECTS (len))
        return NULL_TREE;
    }

  /* Transform strcpy (dest, src) into memcpy (dest, src, len + 1),
     the +1 covering the terminating NUL.  */
  len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
                           build_call_expr_loc (loc, fn, 3, dest, src, len));
}
8484
 
8485
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   stpcpy returns a pointer to the terminating NUL of the copy, so the
   fold is memcpy (dest, src, strlen (src) + 1) followed by
   dest + strlen (src).  Return NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
{
  tree fn, len, lenp1, call, type;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;

  /* The source length must be a compile-time constant so that the
     return value (dest + len) can be formed.  */
  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return NULL_TREE;

  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
  if (!fn)
    return NULL_TREE;

  /* Copy LEN + 1 bytes so the terminating NUL is included.  */
  lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
  /* We use dest twice in building our expression.  Save it from
     multiple expansions.  */
  dest = builtin_save_expr (dest);
  call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);

  /* Build the result dest + len, sequenced after the memcpy call.  */
  type = TREE_TYPE (TREE_TYPE (fndecl));
  len = fold_convert_loc (loc, sizetype, len);
  dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
  dest = fold_convert_loc (loc, type, dest);
  dest = omit_one_operand_loc (loc, type, dest, call);
  return dest;
}
8524
 
8525
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return NULL_TREE if no simplification can be made.  */

tree
fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
                      tree src, tree len, tree slen)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (len == 0 || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (!slen)
    slen = c_strlen (src, 1);

  /* Now, we must be passed a constant src ptr parameter.  */
  if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
    return NULL_TREE;

  /* Count the terminating NUL as well, so the comparison below asks
     whether strncpy will zero-pad (slen < len).  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return NULL_TREE;

  /* OK transform into builtin memcpy.  */
  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
  if (!fn)
    return NULL_TREE;
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
                           build_call_expr_loc (loc, fn, 3, dest, src, len));
}
8571
 
8572
/* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
   arguments to the call, and TYPE is its return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, INTEGER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      /* The searched-for character and the length must both be
         compile-time constants.  */
      if (TREE_CODE (arg2) != INTEGER_CST
          || !host_integerp (len, 1))
        return NULL_TREE;

      /* Evaluate at compile time when ARG1 is a string constant and
         LEN does not read past its terminating NUL.  */
      p1 = c_getstr (arg1);
      if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
        {
          char c;
          const char *r;
          tree tem;

          /* Convert ARG2 to a host char; fails if it does not fit the
             target character type.  */
          if (target_char_cast (arg2, &c))
            return NULL_TREE;

          r = (char *) memchr (p1, c, tree_low_cst (len, 1));

          /* Not found: the result is a null pointer.  */
          if (r == NULL)
            return build_int_cst (TREE_TYPE (arg1), 0);

          /* Found: return arg1 + offset of the match.  */
          tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
                             size_int (r - p1));
          return fold_convert_loc (loc, type, tem);
        }
      return NULL_TREE;
    }
}
8613
 
8614
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   LEN is the number of bytes to compare.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
                              arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      /* Normalize the host memcmp result to -1/0/1.  */
      if (r > 0)
        return integer_one_node;
      else if (r < 0)
        return integer_minus_one_node;
      else
        return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
        = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
        = fold_convert_loc (loc, integer_type_node,
                            build1 (INDIRECT_REF, cst_uchar_node,
                                    fold_convert_loc (loc,
                                                      cst_uchar_ptr_node,
                                                      arg1)));
      tree ind2
        = fold_convert_loc (loc, integer_type_node,
                            build1 (INDIRECT_REF, cst_uchar_node,
                                    fold_convert_loc (loc,
                                                      cst_uchar_ptr_node,
                                                      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8680
 
8681
/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both operands are string constants: compare at compile time and
     normalize the host strcmp result to -1/0/1.  */
  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
        return integer_minus_one_node;
      else if (i > 0)
        return integer_one_node;
      else
        return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
        = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
                               build1 (INDIRECT_REF, cst_uchar_node,
                                       fold_convert_loc (loc,
                                                         cst_uchar_ptr_node,
                                                         arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
        = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
        = fold_convert_loc (loc, integer_type_node,
                            build1 (INDIRECT_REF, cst_uchar_node,
                                    fold_convert_loc (loc,
                                                      cst_uchar_ptr_node,
                                                      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}
8743
 
8744
/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
                              arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* Both strings and the length are constants: evaluate at compile
     time and normalize the host strncmp result to -1/0/1.  */
  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));
      if (i > 0)
        return integer_one_node;
      else if (i < 0)
        return integer_minus_one_node;
      else
        return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
        = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
                               build1 (INDIRECT_REF, cst_uchar_node,
                                       fold_convert_loc (loc,
                                                         cst_uchar_ptr_node,
                                                         arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
        = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
                                    build1 (INDIRECT_REF, cst_uchar_node,
                                            fold_convert_loc (loc,
                                                              cst_uchar_ptr_node,
                                                              arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
        = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
                                    build1 (INDIRECT_REF, cst_uchar_node,
                                            fold_convert_loc (loc,
                                                              cst_uchar_ptr_node,
                                                              arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
                                    build1 (INDIRECT_REF, cst_uchar_node,
                                            fold_convert_loc (loc,
                                                              cst_uchar_ptr_node,
                                                              arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8838
 
8839
/* Fold function call to builtin signbit, signbitf or signbitl with argument
   ARG.  TYPE is the call's return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_signbit (location_t loc, tree arg, tree type)
{
  tree temp;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If ARG is a compile-time constant, determine the result.  */
  if (TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE c;

      c = TREE_REAL_CST (arg);
      temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
      return fold_convert_loc (loc, type, temp);
    }

  /* If ARG is non-negative, the result is always zero.  */
  if (tree_expr_nonnegative_p (arg))
    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

  /* If ARG's format doesn't have signed zeros, return "arg < 0.0".
     (With signed zeros this would be wrong for -0.0, whose signbit is
     set but which does not compare less than zero.)  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
    return fold_build2_loc (loc, LT_EXPR, type, arg,
                        build_real (TREE_TYPE (arg), dconst0));

  return NULL_TREE;
}
8872
 
8873
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  TYPE is the call's return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
                       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
                             fold_build1_loc (loc, ABS_EXPR, type, arg1),
                             arg2);

  /* Strip sign changing operations for the first argument, since
     copysign replaces X's sign anyway; rebuild the call with the
     simplified operand.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}
8920
 
8921
/* Fold a call to builtin isascii with argument ARG.  */
8922
 
8923
static tree
8924
fold_builtin_isascii (location_t loc, tree arg)
8925
{
8926
  if (!validate_arg (arg, INTEGER_TYPE))
8927
    return NULL_TREE;
8928
  else
8929
    {
8930
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
8931
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8932
                         build_int_cst (NULL_TREE,
8933
                                        ~ (unsigned HOST_WIDE_INT) 0x7f));
8934
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8935
                          arg, integer_zero_node);
8936
    }
8937
}
8938
 
8939
/* Fold a call to builtin toascii with argument ARG.  */
8940
 
8941
static tree
8942
fold_builtin_toascii (location_t loc, tree arg)
8943
{
8944
  if (!validate_arg (arg, INTEGER_TYPE))
8945
    return NULL_TREE;
8946
 
8947
  /* Transform toascii(c) -> (c & 0x7f).  */
8948
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8949
                      build_int_cst (NULL_TREE, 0x7f));
8950
}
8951
 
8952
/* Fold a call to builtin isdigit with argument ARG.  */
8953
 
8954
static tree
8955
fold_builtin_isdigit (location_t loc, tree arg)
8956
{
8957
  if (!validate_arg (arg, INTEGER_TYPE))
8958
    return NULL_TREE;
8959
  else
8960
    {
8961
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
8962
      /* According to the C standard, isdigit is unaffected by locale.
8963
         However, it definitely is affected by the target character set.  */
8964
      unsigned HOST_WIDE_INT target_digit0
8965
        = lang_hooks.to_target_charset ('0');
8966
 
8967
      if (target_digit0 == 0)
8968
        return NULL_TREE;
8969
 
8970
      arg = fold_convert_loc (loc, unsigned_type_node, arg);
8971
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8972
                         build_int_cst (unsigned_type_node, target_digit0));
8973
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8974
                          build_int_cst (unsigned_type_node, 9));
8975
    }
8976
}
8977
 
8978
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */
8979
 
8980
static tree
8981
fold_builtin_fabs (location_t loc, tree arg, tree type)
8982
{
8983
  if (!validate_arg (arg, REAL_TYPE))
8984
    return NULL_TREE;
8985
 
8986
  arg = fold_convert_loc (loc, type, arg);
8987
  if (TREE_CODE (arg) == REAL_CST)
8988
    return fold_abs_const (arg, type);
8989
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
8990
}
8991
 
8992
/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */
8993
 
8994
static tree
8995
fold_builtin_abs (location_t loc, tree arg, tree type)
8996
{
8997
  if (!validate_arg (arg, INTEGER_TYPE))
8998
    return NULL_TREE;
8999
 
9000
  arg = fold_convert_loc (loc, type, arg);
9001
  if (TREE_CODE (arg) == INTEGER_CST)
9002
    return fold_abs_const (arg, type);
9003
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
9004
}
9005
 
9006
/* Fold a call to builtin fmin or fmax.  ARG0 and ARG1 are the operands,
   TYPE the call's return type, and MAX selects fmax (true) vs fmin
   (false).  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
                        tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
        return res;

      /* If either argument is NaN, return the other one.  Avoid the
         transformation if we get (and honor) a signalling NaN.  Using
         omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
          && real_isnan (&TREE_REAL_CST (arg0))
          && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              || ! TREE_REAL_CST (arg0).signalling))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
          && real_isnan (&TREE_REAL_CST (arg1))
          && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
              || ! TREE_REAL_CST (arg1).signalling))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
         functions to return the numeric arg if the other one is NaN.
         These tree codes don't honor that, so only transform if
         -ffinite-math-only is set.  C99 doesn't require -0.0 to be
         handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
        return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
                            fold_convert_loc (loc, type, arg0),
                            fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
9050
 
9051
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */
9052
 
9053
static tree
9054
fold_builtin_carg (location_t loc, tree arg, tree type)
9055
{
9056
  if (validate_arg (arg, COMPLEX_TYPE)
9057
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9058
    {
9059
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9060
 
9061
      if (atan2_fn)
9062
        {
9063
          tree new_arg = builtin_save_expr (arg);
9064
          tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9065
          tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9066
          return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9067
        }
9068
    }
9069
 
9070
  return NULL_TREE;
9071
}
9072
 
9073
/* Fold a call to builtin logb/ilogb with argument ARG.  RETTYPE is the
   call's return type: real for logb, integer for ilogb.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
      {
      case rvc_nan:
      case rvc_inf:
        /* If arg is Inf or NaN and we're logb, return it.  */
        if (TREE_CODE (rettype) == REAL_TYPE)
          return fold_convert_loc (loc, rettype, arg);
        /* Fall through... */
      case rvc_zero:
        /* Zero may set errno and/or raise an exception for logb, also
           for ilogb we don't know FP_ILOGB0.  */
        return NULL_TREE;
      case rvc_normal:
        /* For normal numbers, proceed iff radix == 2.  In GCC,
           normalized significands are in the range [0.5, 1.0).  We
           want the exponent as if they were [1.0, 2.0) so get the
           exponent and subtract 1.  */
        if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
          return fold_convert_loc (loc, rettype,
                                   build_int_cst (NULL_TREE,
                                                  REAL_EXP (value)-1));
        break;
      }
    }

  return NULL_TREE;
}
9114
 
9115
/* Fold a call to builtin significand with argument ARG, if radix == 2.
   RETTYPE is the call's return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
      {
      case rvc_zero:
      case rvc_nan:
      case rvc_inf:
        /* If arg is +-0, +-Inf or +-NaN, then return it.  */
        return fold_convert_loc (loc, rettype, arg);
      case rvc_normal:
        /* For normal numbers, proceed iff radix == 2.  */
        if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
          {
            REAL_VALUE_TYPE result = *value;
            /* In GCC, normalized significands are in the range [0.5,
               1.0).  We want them to be [1.0, 2.0) so set the
               exponent to 1.  */
            SET_REAL_EXP (&result, 1);
            return build_real (rettype, result);
          }
        break;
      }
    }

  return NULL_TREE;
}
9153
 
9154
/* Fold a call to builtin frexp, we can assume the base is 2.  */
9155
 
9156
static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold a constant (non-overflowed) real argument.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
      {
      case rvc_zero:
        /* For +-0, return (*exp = 0, +-0).  */
        exp = integer_zero_node;
        frac = arg0;
        break;
      case rvc_nan:
      case rvc_inf:
        /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
        return omit_one_operand_loc (loc, rettype, arg0, arg1);
      case rvc_normal:
        {
          /* Since the frexp function always expects base 2, and in
             GCC normalized significands are already in the range
             [0.5, 1.0), we have exactly what frexp wants.  */
          REAL_VALUE_TYPE frac_rvt = *value;
          SET_REAL_EXP (&frac_rvt, 0);
          frac = build_real (rettype, frac_rvt);
          exp = build_int_cst (NULL_TREE, REAL_EXP (value));
        }
        break;
      default:
        gcc_unreachable ();
      }

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9209
 
9210
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
9211
   then we can assume the base is two.  If it's false, then we have to
9212
   check the mode of the TYPE parameter in certain cases.  */
9213
 
9214
static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
                            tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
          || (TREE_CODE (arg0) == REAL_CST
              && !real_isfinite (&TREE_REAL_CST (arg0))))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.
         For scalbn/scalbln (!LDEXP) the fold is only valid when the
         type's radix is 2.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
          && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
          && host_integerp (arg1, 0))
        {
          /* Bound the maximum adjustment to twice the range of the
             mode's valid exponents.  Use abs to ensure the range is
             positive as a sanity check.  */
          const long max_exp_adj = 2 *
            labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
                 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

          /* Get the user-requested adjustment.  */
          const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

          /* The requested adjustment must be inside this range.  This
             is a preliminary cap to avoid things like overflow, we
             may still fail to compute the result for other reasons.  */
          if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
            {
              REAL_VALUE_TYPE initial_result;

              real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

              /* Ensure we didn't overflow.  */
              if (! real_isinf (&initial_result))
                {
                  const REAL_VALUE_TYPE trunc_result
                    = real_value_truncate (TYPE_MODE (type), initial_result);

                  /* Only proceed if the target mode can hold the
                     resulting value.  */
                  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
                    return build_real (type, trunc_result);
                }
            }
        }
    }

  return NULL_TREE;
}
9270
 
9271
/* Fold a call to builtin modf.  */
9272
 
9273
static tree
9274
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9275
{
9276
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9277
    return NULL_TREE;
9278
 
9279
  STRIP_NOPS (arg0);
9280
 
9281
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9282
    return NULL_TREE;
9283
 
9284
  arg1 = build_fold_indirect_ref_loc (loc, arg1);
9285
 
9286
  /* Proceed if a valid pointer type was passed in.  */
9287
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9288
    {
9289
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9290
      REAL_VALUE_TYPE trunc, frac;
9291
 
9292
      switch (value->cl)
9293
      {
9294
      case rvc_nan:
9295
      case rvc_zero:
9296
        /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
9297
        trunc = frac = *value;
9298
        break;
9299
      case rvc_inf:
9300
        /* For +-Inf, return (*arg1 = arg0, +-0).  */
9301
        frac = dconst0;
9302
        frac.sign = value->sign;
9303
        trunc = *value;
9304
        break;
9305
      case rvc_normal:
9306
        /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
9307
        real_trunc (&trunc, VOIDmode, value);
9308
        real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9309
        /* If the original number was negative and already
9310
           integral, then the fractional part is -0.0.  */
9311
        if (value->sign && frac.cl == rvc_zero)
9312
          frac.sign = value->sign;
9313
        break;
9314
      }
9315
 
9316
      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9317
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9318
                          build_real (rettype, trunc));
9319
      TREE_SIDE_EFFECTS (arg1) = 1;
9320
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9321
                          build_real (rettype, frac));
9322
    }
9323
 
9324
  return NULL_TREE;
9325
}
9326
 
9327
/* Given a location LOC, an interclass builtin function decl FNDECL
9328
   and its single argument ARG, return an folded expression computing
9329
   the same, or NULL_TREE if we either couldn't or didn't want to fold
9330
   (the latter happen if there's an RTL instruction available).  */
9331
 
9332
static tree
9333
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9334
{
9335
  enum machine_mode mode;
9336
 
9337
  if (!validate_arg (arg, REAL_TYPE))
9338
    return NULL_TREE;
9339
 
9340
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9341
    return NULL_TREE;
9342
 
9343
  mode = TYPE_MODE (TREE_TYPE (arg));
9344
 
9345
  /* If there is no optab, try generic code.  */
9346
  switch (DECL_FUNCTION_CODE (fndecl))
9347
    {
9348
      tree result;
9349
 
9350
    CASE_FLT_FN (BUILT_IN_ISINF):
9351
      {
9352
        /* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
9353
        tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9354
        tree const type = TREE_TYPE (arg);
9355
        REAL_VALUE_TYPE r;
9356
        char buf[128];
9357
 
9358
        get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9359
        real_from_string (&r, buf);
9360
        result = build_call_expr (isgr_fn, 2,
9361
                                  fold_build1_loc (loc, ABS_EXPR, type, arg),
9362
                                  build_real (type, r));
9363
        return result;
9364
      }
9365
    CASE_FLT_FN (BUILT_IN_FINITE):
9366
    case BUILT_IN_ISFINITE:
9367
      {
9368
        /* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
9369
        tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9370
        tree const type = TREE_TYPE (arg);
9371
        REAL_VALUE_TYPE r;
9372
        char buf[128];
9373
 
9374
        get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9375
        real_from_string (&r, buf);
9376
        result = build_call_expr (isle_fn, 2,
9377
                                  fold_build1_loc (loc, ABS_EXPR, type, arg),
9378
                                  build_real (type, r));
9379
        /*result = fold_build2_loc (loc, UNGT_EXPR,
9380
                                  TREE_TYPE (TREE_TYPE (fndecl)),
9381
                                  fold_build1_loc (loc, ABS_EXPR, type, arg),
9382
                                  build_real (type, r));
9383
        result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9384
                                  TREE_TYPE (TREE_TYPE (fndecl)),
9385
                                  result);*/
9386
        return result;
9387
      }
9388
    case BUILT_IN_ISNORMAL:
9389
      {
9390
        /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9391
           islessequal(fabs(x),DBL_MAX).  */
9392
        tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9393
        tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9394
        tree const type = TREE_TYPE (arg);
9395
        REAL_VALUE_TYPE rmax, rmin;
9396
        char buf[128];
9397
 
9398
        get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9399
        real_from_string (&rmax, buf);
9400
        sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9401
        real_from_string (&rmin, buf);
9402
        arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9403
        result = build_call_expr (isle_fn, 2, arg,
9404
                                  build_real (type, rmax));
9405
        result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9406
                              build_call_expr (isge_fn, 2, arg,
9407
                                               build_real (type, rmin)));
9408
        return result;
9409
      }
9410
    default:
9411
      break;
9412
    }
9413
 
9414
  return NULL_TREE;
9415
}
9416
 
9417
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9418
   ARG is the argument for the call.  */
9419
 
9420
static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* If the mode has no infinities, isinf(x) folds to 0.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
        {
          r = TREE_REAL_CST (arg);
          if (real_isinf (&r))
            return real_compare (GT_EXPR, &r, &dconst0)
                   ? integer_one_node : integer_minus_one_node;
          else
            return integer_zero_node;
        }

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
        /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
        /* In a boolean context, GCC will fold the inner COND_EXPR to
           1.  So e.g. "if (isinf_sign(x))" would be folded to just
           "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
        tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
        tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
        tree tmp = NULL_TREE;

        arg = builtin_save_expr (arg);

        if (signbit_fn && isinf_fn)
          {
            tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
            tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

            /* Normalize both calls to 0/1 before building the nested
               conditionals.  */
            signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
                                        signbit_call, integer_zero_node);
            isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
                                      isinf_call, integer_zero_node);

            tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
                               integer_minus_one_node, integer_one_node);
            tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
                               isinf_call, tmp,
                               integer_zero_node);
          }

        return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* If the mode has neither NaNs nor infinities, every value is
         finite: fold to 1.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
          && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
        return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
        {
          r = TREE_REAL_CST (arg);
          return real_isfinite (&r) ? integer_one_node : integer_zero_node;
        }

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* If the mode has no NaNs, isnan(x) folds to 0.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
        {
          r = TREE_REAL_CST (arg);
          return real_isnan (&r) ? integer_one_node : integer_zero_node;
        }

      /* Non-constant argument: expand as the unordered self-compare
         "arg UNORD arg", which is true exactly for NaN.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
9509
 
9510
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9511
   This builtin will generate code to return the appropriate floating
9512
   point classification depending on the value of the floating point
9513
   number passed in.  The possible return values must be supplied as
9514
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9515
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
9516
   one floating point argument which is "type generic".  */
9517
 
9518
static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE,
                         INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* ARG is only evaluated once; fabs() makes the later comparisons
     sign-independent.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
         (fabs(x) == Inf ? FP_INFINITE :
           (fabs(x) >= DBL_MIN ? FP_NORMAL :
             (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  /* The conditional chain is built innermost-first: start with the
     zero/subnormal distinction ...  */
  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
                     build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
                     tmp, fp_zero, fp_subnormal);

  /* ... then normal numbers: 0x1p(emin-1) is the smallest normalized
     value of the mode.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
                     arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  /* ... then infinity, only when the mode honors it ...  */
  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
                         build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
                         fp_infinite, res);
    }

  /* ... and the NaN test outermost, again only if honored.  */
  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
9577
 
9578
/* Fold a call to an unordered comparison function such as
9579
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
9580
   being called and ARG0 and ARG1 are the arguments for the call.
9581
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9582
   the opposite of the desired result.  UNORDERED_CODE is used
9583
   for modes that can hold NaNs and ORDERED_CODE is used for
9584
   the rest.  */
9585
 
9586
static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
                            enum tree_code unordered_code,
                            enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  /* NOTE(review): cmp_type stays NULL_TREE when neither argument is
     REAL_TYPE -- presumably callers only reach here with at least one
     real operand; confirm before relying on it.  */
  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* If the mode has no NaNs, the operands are never unordered.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
        return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  /* Build the negation of the inverse comparison; use the plain
     ordered code when NaNs need not be honored.  */
  code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
                                                   : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
                      fold_build2_loc (loc, code, type, arg0, arg1));
}
9627
 
9628
/* Fold a call to built-in function FNDECL with 0 arguments.
9629
   IGNORE is true if the result of the function call is ignored.  This
9630
   function returns NULL_TREE if no simplification was possible.  */
9631
 
9632
static tree
9633
fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9634
{
9635
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
9636
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9637
  switch (fcode)
9638
    {
9639
    CASE_FLT_FN (BUILT_IN_INF):
9640
    case BUILT_IN_INFD32:
9641
    case BUILT_IN_INFD64:
9642
    case BUILT_IN_INFD128:
9643
      return fold_builtin_inf (loc, type, true);
9644
 
9645
    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9646
      return fold_builtin_inf (loc, type, false);
9647
 
9648
    case BUILT_IN_CLASSIFY_TYPE:
9649
      return fold_builtin_classify_type (NULL_TREE);
9650
 
9651
    default:
9652
      break;
9653
    }
9654
  return NULL_TREE;
9655
}
9656
 
9657
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9658
   IGNORE is true if the result of the function call is ignored.  This
9659
   function returns NULL_TREE if no simplification was possible.  */
9660
 
9661
static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  /* Each per-builtin folder below returns NULL_TREE when it cannot
     simplify the call; IGNORE is only consulted by the printf
     folding at the bottom.  */
  switch (fcode)
    {

    case BUILT_IN_CONSTANT_P:
      {
        tree val = fold_builtin_constant_p (arg0);

        /* Gimplification will pull the CALL_EXPR for the builtin out of
           an if condition.  When not optimizing, we'll not CSE it back.
           To avoid link error types of regressions, return false now.  */
        if (!val && !optimize)
          val = integer_zero_node;

        return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    /* Complex-valued builtins: fold only when the argument is a
       complex value whose component type is REAL_TYPE; most constant
       cases go through the MPC-based do_mpc_arg1.  */
    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
        && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
    break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
        && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
    break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
    break;

    CASE_FLT_FN (BUILT_IN_CCOS):
      return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);

    CASE_FLT_FN (BUILT_IN_CCOSH):
      return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);

    CASE_FLT_FN (BUILT_IN_CSIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_sin);
    break;

    CASE_FLT_FN (BUILT_IN_CSINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_sinh);
    break;

    CASE_FLT_FN (BUILT_IN_CTAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_tan);
    break;

    CASE_FLT_FN (BUILT_IN_CTANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_tanh);
    break;

    CASE_FLT_FN (BUILT_IN_CLOG):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_log);
    break;

    CASE_FLT_FN (BUILT_IN_CSQRT):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_sqrt);
    break;

    CASE_FLT_FN (BUILT_IN_CASIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_asin);
    break;

    CASE_FLT_FN (BUILT_IN_CACOS):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_acos);
    break;

    CASE_FLT_FN (BUILT_IN_CATAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_atan);
    break;

    CASE_FLT_FN (BUILT_IN_CASINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_asinh);
    break;

    CASE_FLT_FN (BUILT_IN_CACOSH):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_acosh);
    break;

    CASE_FLT_FN (BUILT_IN_CATANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return do_mpc_arg1 (arg0, type, mpc_atanh);
    break;

    CASE_FLT_FN (BUILT_IN_CABS):
      return fold_builtin_cabs (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SQRT):
      return fold_builtin_sqrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CBRT):
      return fold_builtin_cbrt (loc, arg0, type);

    /* Real-valued math functions with a constant argument are
       evaluated via MPFR (do_mpfr_arg1); the trailing dconst
       arguments supply the domain bounds checked before evaluation
       (e.g. [-1, 1] for asin).  */
    CASE_FLT_FN (BUILT_IN_ASIN):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_asin,
                             &dconstm1, &dconst1, true);
    break;

    CASE_FLT_FN (BUILT_IN_ACOS):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_acos,
                             &dconstm1, &dconst1, true);
    break;

    CASE_FLT_FN (BUILT_IN_ATAN):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ASINH):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ACOSH):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_acosh,
                             &dconst1, NULL, true);
    break;

    CASE_FLT_FN (BUILT_IN_ATANH):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_atanh,
                             &dconstm1, &dconst1, false);
    break;

    CASE_FLT_FN (BUILT_IN_SIN):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_COS):
      return fold_builtin_cos (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TAN):
      return fold_builtin_tan (arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXP):
      return fold_builtin_cexp (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXPI):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
    break;

    CASE_FLT_FN (BUILT_IN_SINH):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_COSH):
      return fold_builtin_cosh (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TANH):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ERF):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_ERFC):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_TGAMMA):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_EXP):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);

    CASE_FLT_FN (BUILT_IN_EXP2):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);

    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);

    CASE_FLT_FN (BUILT_IN_EXPM1):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_LOG):
    return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);

    CASE_FLT_FN (BUILT_IN_LOG2):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);

    CASE_FLT_FN (BUILT_IN_LOG10):
      return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);

    CASE_FLT_FN (BUILT_IN_LOG1P):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_log1p,
                             &dconstm1, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_J0):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_j0,
                             NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_J1):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_j1,
                             NULL, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_Y0):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_y0,
                             &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_Y1):
      if (validate_arg (arg0, REAL_TYPE))
        return do_mpfr_arg1 (arg0, type, mpfr_y1,
                             &dconst0, NULL, false);
    break;

    CASE_FLT_FN (BUILT_IN_NAN):
    case BUILT_IN_NAND32:
    case BUILT_IN_NAND64:
    case BUILT_IN_NAND128:
      return fold_builtin_nan (arg0, type, true);

    CASE_FLT_FN (BUILT_IN_NANS):
      return fold_builtin_nan (arg0, type, false);

    CASE_FLT_FN (BUILT_IN_FLOOR):
      return fold_builtin_floor (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_CEIL):
      return fold_builtin_ceil (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_TRUNC):
      return fold_builtin_trunc (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ROUND):
      return fold_builtin_round (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return fold_trunc_transparent_mathfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      return fold_builtin_int_roundingfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      return fold_fixed_mathfn (loc, fndecl, arg0);

    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      return fold_builtin_bswap (fndecl, arg0);

    CASE_INT_FN (BUILT_IN_FFS):
    CASE_INT_FN (BUILT_IN_CLZ):
    CASE_INT_FN (BUILT_IN_CTZ):
    CASE_INT_FN (BUILT_IN_POPCOUNT):
    CASE_INT_FN (BUILT_IN_PARITY):
      return fold_builtin_bitop (fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
      return fold_builtin_signbit (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      return fold_builtin_significand (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ILOGB):
    CASE_FLT_FN (BUILT_IN_LOGB):
      return fold_builtin_logb (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    /* For the classification builtins, try the constant-argument
       fold first; fall back to the generic interclass expansion.  */
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
        tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
        if (ret)
          return ret;
        return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
        tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
        if (ret)
          return ret;
        return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);

    default:
      break;
    }

  return NULL_TREE;

}
10054
 
10055
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10056
   IGNORE is true if the result of the function call is ignored.  This
10057
   function returns NULL_TREE if no simplification was possible.  */
10058
 
10059
static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
{
  /* TYPE is the return type of the builtin being folded.  */
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    /* For the math builtins below, a `break' after a guarded return
       means argument validation failed; we fall through to the final
       NULL_TREE (no simplification).  */
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
          && validate_arg (arg1, REAL_TYPE))
        return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
                                 &dconst0, false);
    break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg(arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
    break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg(arg1, POINTER_TYPE))
        return do_mpfr_lgamma_r (arg0, arg1, type);
    break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg(arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
    break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg(arg1, REAL_TYPE))
        return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
    break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
          && validate_arg (arg1, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
        return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
    break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
                                         type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_BZERO:
      return fold_builtin_bzero (loc, arg0, arg1, ignore);

    case BUILT_IN_FPUTS:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);

    case BUILT_IN_FPUTS_UNLOCKED:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRCAT:
      return fold_builtin_strcat (loc, arg0, arg1);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCPY:
      return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);

    case BUILT_IN_STPCPY:
      /* When the result is unused, stpcpy can be lowered to plain strcpy
         (the return-value difference between the two does not matter).  */
      if (ignore)
        {
          tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
          if (!fn)
            break;

          return build_call_expr_loc (loc, fn, 2, arg0, arg1);
        }
      else
        return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
      break;

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    /* The unordered comparison builtins fold to the paired tree codes:
       the first code is used when the operands may be unordered, the
       second when neither can be a NaN.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNORDERED_EXPR,
                                         NOP_EXPR);

      /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* The first argument of the _chk variants is the checking flag;
         it must be a side-effect-free integer for folding to be safe.  */
      if (!validate_arg (arg0, INTEGER_TYPE)
          || TREE_SIDE_EFFECTS (arg0))
        return NULL_TREE;
      else
        return fold_builtin_printf (loc, fndecl,
                                    arg1, NULL_TREE, ignore, fcode);
    break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
                                   ignore, fcode);

    default:
      break;
    }
  return NULL_TREE;
}
10255
 
10256
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10257
   and ARG2.  IGNORE is true if the result of the function call is ignored.
10258
   This function returns NULL_TREE if no simplification was possible.  */
10259
 
10260
static tree
10261
fold_builtin_3 (location_t loc, tree fndecl,
10262
                tree arg0, tree arg1, tree arg2, bool ignore)
10263
{
10264
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
10265
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10266
  switch (fcode)
10267
    {
10268
 
10269
    CASE_FLT_FN (BUILT_IN_SINCOS):
10270
      return fold_builtin_sincos (loc, arg0, arg1, arg2);
10271
 
10272
    CASE_FLT_FN (BUILT_IN_FMA):
10273
      if (validate_arg (arg0, REAL_TYPE)
10274
          && validate_arg(arg1, REAL_TYPE)
10275
          && validate_arg(arg2, REAL_TYPE))
10276
        return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10277
    break;
10278
 
10279
    CASE_FLT_FN (BUILT_IN_REMQUO):
10280
      if (validate_arg (arg0, REAL_TYPE)
10281
          && validate_arg(arg1, REAL_TYPE)
10282
          && validate_arg(arg2, POINTER_TYPE))
10283
        return do_mpfr_remquo (arg0, arg1, arg2);
10284
    break;
10285
 
10286
    case BUILT_IN_MEMSET:
10287
      return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10288
 
10289
    case BUILT_IN_BCOPY:
10290
      return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10291
                                     void_type_node, true, /*endp=*/3);
10292
 
10293
    case BUILT_IN_MEMCPY:
10294
      return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10295
                                     type, ignore, /*endp=*/0);
10296
 
10297
    case BUILT_IN_MEMPCPY:
10298
      return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10299
                                     type, ignore, /*endp=*/1);
10300
 
10301
    case BUILT_IN_MEMMOVE:
10302
      return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10303
                                     type, ignore, /*endp=*/3);
10304
 
10305
    case BUILT_IN_STRNCAT:
10306
      return fold_builtin_strncat (loc, arg0, arg1, arg2);
10307
 
10308
    case BUILT_IN_STRNCPY:
10309
      return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10310
 
10311
    case BUILT_IN_STRNCMP:
10312
      return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10313
 
10314
    case BUILT_IN_MEMCHR:
10315
      return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10316
 
10317
    case BUILT_IN_BCMP:
10318
    case BUILT_IN_MEMCMP:
10319
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10320
 
10321
    case BUILT_IN_SPRINTF:
10322
      return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10323
 
10324
    case BUILT_IN_STRCPY_CHK:
10325
    case BUILT_IN_STPCPY_CHK:
10326
      return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10327
                                      ignore, fcode);
10328
 
10329
    case BUILT_IN_STRCAT_CHK:
10330
      return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10331
 
10332
    case BUILT_IN_PRINTF_CHK:
10333
    case BUILT_IN_VPRINTF_CHK:
10334
      if (!validate_arg (arg0, INTEGER_TYPE)
10335
          || TREE_SIDE_EFFECTS (arg0))
10336
        return NULL_TREE;
10337
      else
10338
        return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10339
    break;
10340
 
10341
    case BUILT_IN_FPRINTF:
10342
    case BUILT_IN_FPRINTF_UNLOCKED:
10343
    case BUILT_IN_VFPRINTF:
10344
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10345
                                   ignore, fcode);
10346
 
10347
    case BUILT_IN_FPRINTF_CHK:
10348
    case BUILT_IN_VFPRINTF_CHK:
10349
      if (!validate_arg (arg1, INTEGER_TYPE)
10350
          || TREE_SIDE_EFFECTS (arg1))
10351
        return NULL_TREE;
10352
      else
10353
        return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10354
                                     ignore, fcode);
10355
 
10356
    default:
10357
      break;
10358
    }
10359
  return NULL_TREE;
10360
}
10361
 
10362
/* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10363
   ARG2, and ARG3.  IGNORE is true if the result of the function call is
10364
   ignored.  This function returns NULL_TREE if no simplification was
10365
   possible.  */
10366
 
10367
static tree
10368
fold_builtin_4 (location_t loc, tree fndecl,
10369
                tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10370
{
10371
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10372
 
10373
  switch (fcode)
10374
    {
10375
    case BUILT_IN_MEMCPY_CHK:
10376
    case BUILT_IN_MEMPCPY_CHK:
10377
    case BUILT_IN_MEMMOVE_CHK:
10378
    case BUILT_IN_MEMSET_CHK:
10379
      return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10380
                                      NULL_TREE, ignore,
10381
                                      DECL_FUNCTION_CODE (fndecl));
10382
 
10383
    case BUILT_IN_STRNCPY_CHK:
10384
      return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10385
 
10386
    case BUILT_IN_STRNCAT_CHK:
10387
      return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10388
 
10389
    case BUILT_IN_FPRINTF_CHK:
10390
    case BUILT_IN_VFPRINTF_CHK:
10391
      if (!validate_arg (arg1, INTEGER_TYPE)
10392
          || TREE_SIDE_EFFECTS (arg1))
10393
        return NULL_TREE;
10394
      else
10395
        return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10396
                                     ignore, fcode);
10397
    break;
10398
 
10399
    default:
10400
      break;
10401
    }
10402
  return NULL_TREE;
10403
}
10404
 
10405
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
10406
    arguments, where NARGS <= 4.  IGNORE is true if the result of the
10407
    function call is ignored.  This function returns NULL_TREE if no
10408
    simplification was possible.  Note that this only folds builtins with
10409
    fixed argument patterns.  Foldings that do varargs-to-varargs
10410
    transformations, or that match calls with more than 4 arguments,
10411
    need to be handled with fold_builtin_varargs instead.  */
10412
 
10413
#define MAX_ARGS_TO_FOLD_BUILTIN 4
10414
 
10415
static tree
10416
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10417
{
10418
  tree ret = NULL_TREE;
10419
 
10420
  switch (nargs)
10421
    {
10422
    case 0:
10423
      ret = fold_builtin_0 (loc, fndecl, ignore);
10424
      break;
10425
    case 1:
10426
      ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10427
      break;
10428
    case 2:
10429
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10430
      break;
10431
    case 3:
10432
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10433
      break;
10434
    case 4:
10435
      ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10436
                            ignore);
10437
      break;
10438
    default:
10439
      break;
10440
    }
10441
  if (ret)
10442
    {
10443
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10444
      SET_EXPR_LOCATION (ret, loc);
10445
      TREE_NO_WARNING (ret) = 1;
10446
      return ret;
10447
    }
10448
  return NULL_TREE;
10449
}
10450
 
10451
/* Builtins with folding operations that operate on "..." arguments
10452
   need special handling; we need to store the arguments in a convenient
10453
   data structure before attempting any folding.  Fortunately there are
10454
   only a few builtins that fall into this category.  FNDECL is the
10455
   function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10456
   result of the function call is ignored.  */
10457
 
10458
static tree
10459
fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10460
                      bool ignore ATTRIBUTE_UNUSED)
10461
{
10462
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10463
  tree ret = NULL_TREE;
10464
 
10465
  switch (fcode)
10466
    {
10467
    case BUILT_IN_SPRINTF_CHK:
10468
    case BUILT_IN_VSPRINTF_CHK:
10469
      ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10470
      break;
10471
 
10472
    case BUILT_IN_SNPRINTF_CHK:
10473
    case BUILT_IN_VSNPRINTF_CHK:
10474
      ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10475
      break;
10476
 
10477
    case BUILT_IN_FPCLASSIFY:
10478
      ret = fold_builtin_fpclassify (loc, exp);
10479
      break;
10480
 
10481
    default:
10482
      break;
10483
    }
10484
  if (ret)
10485
    {
10486
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10487
      SET_EXPR_LOCATION (ret, loc);
10488
      TREE_NO_WARNING (ret) = 1;
10489
      return ret;
10490
    }
10491
  return NULL_TREE;
10492
}
10493
 
10494
/* Return true if FNDECL shouldn't be folded right now.
10495
   If a built-in function has an inline attribute always_inline
10496
   wrapper, defer folding it after always_inline functions have
10497
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10498
   might not be performed.  */
10499
 
10500
static bool
10501
avoid_folding_inline_builtin (tree fndecl)
10502
{
10503
  return (DECL_DECLARED_INLINE_P (fndecl)
10504
          && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10505
          && cfun
10506
          && !cfun->always_inline_functions_inlined
10507
          && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10508
}
10509
 
10510
/* A wrapper function for builtin folding that prevents warnings for
10511
   "statement without effect" and the like, caused by removing the
10512
   call node earlier than the warning is generated.  */
10513
 
10514
tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
         yet.  Defer folding until we see all the arguments
         (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
         instead last argument is __builtin_va_arg_pack ().  Defer folding
         even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
          if (fndecl2
              && TREE_CODE (fndecl2) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
            return NULL_TREE;
        }

      /* always_inline wrappers (e.g. _FORTIFY_SOURCE) must be inlined
         before their builtin bodies are folded.  */
      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;

      /* FIXME: Don't use a list in this interface.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
          return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
      else
        {
          /* Try the fixed-arity folders first, then the varargs ones.  */
          if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
            {
              tree *args = CALL_EXPR_ARGP (exp);
              ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
            }
          if (!ret)
            ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
          if (ret)
            return ret;
        }
    }
  return NULL_TREE;
}
10563
 
10564
/* Conveniently construct a function call expression.  FNDECL names the
10565
    function to be called and ARGLIST is a TREE_LIST of arguments.  */
10566
 
10567
tree
10568
build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10569
{
10570
  tree fntype = TREE_TYPE (fndecl);
10571
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10572
  int n = list_length (arglist);
10573
  tree *argarray = (tree *) alloca (n * sizeof (tree));
10574
  int i;
10575
 
10576
  for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10577
    argarray[i] = TREE_VALUE (arglist);
10578
  return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10579
}
10580
 
10581
/* Conveniently construct a function call expression.  FNDECL names the
10582
   function to be called, N is the number of arguments, and the "..."
10583
   parameters are the argument expressions.  */
10584
 
10585
tree
10586
build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10587
{
10588
  va_list ap;
10589
  tree fntype = TREE_TYPE (fndecl);
10590
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10591
  tree *argarray = (tree *) alloca (n * sizeof (tree));
10592
  int i;
10593
 
10594
  va_start (ap, n);
10595
  for (i = 0; i < n; i++)
10596
    argarray[i] = va_arg (ap, tree);
10597
  va_end (ap);
10598
  return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10599
}
10600
 
10601
/* Like build_call_expr_loc (UNKNOWN_LOCATION, ...).  Duplicated because
10602
   varargs macros aren't supported by all bootstrap compilers.  */
10603
 
10604
tree
10605
build_call_expr (tree fndecl, int n, ...)
10606
{
10607
  va_list ap;
10608
  tree fntype = TREE_TYPE (fndecl);
10609
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10610
  tree *argarray = (tree *) alloca (n * sizeof (tree));
10611
  int i;
10612
 
10613
  va_start (ap, n);
10614
  for (i = 0; i < n; i++)
10615
    argarray[i] = va_arg (ap, tree);
10616
  va_end (ap);
10617
  return fold_builtin_call_array (UNKNOWN_LOCATION, TREE_TYPE (fntype),
10618
                                  fn, n, argarray);
10619
}
10620
 
10621
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10622
   N arguments are passed in the array ARGARRAY.  */
10623
 
10624
tree
fold_builtin_call_array (location_t loc, tree type,
                         tree fn,
                         int n,
                         tree *argarray)
{
  tree ret = NULL_TREE;
  int i;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
  {
    tree fndecl = TREE_OPERAND (fn, 0);
    if (TREE_CODE (fndecl) == FUNCTION_DECL
        && DECL_BUILT_IN (fndecl))
      {
        /* If last argument is __builtin_va_arg_pack (), arguments to this
           function are not finalized yet.  Defer folding until they are.  */
        if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
          {
            tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
            if (fndecl2
                && TREE_CODE (fndecl2) == FUNCTION_DECL
                && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
                && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
              return build_call_array_loc (loc, type, fn, n, argarray);
          }
        /* always_inline wrappers must be inlined before folding.  */
        if (avoid_folding_inline_builtin (fndecl))
          return build_call_array_loc (loc, type, fn, n, argarray);
        if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
          {
            /* Target hook still takes a TREE_LIST; build one back-to-front
               so the list ends up in argument order.  */
            tree arglist = NULL_TREE;
            for (i = n - 1; i >= 0; i--)
              arglist = tree_cons (NULL_TREE, argarray[i], arglist);
            ret = targetm.fold_builtin (fndecl, arglist, false);
            if (ret)
              return ret;
            return build_call_array_loc (loc, type, fn, n, argarray);
          }
        else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
          {
            /* First try the transformations that don't require consing up
               an exp.  */
            ret = fold_builtin_n (loc, fndecl, argarray, n, false);
            if (ret)
              return ret;
          }

        /* If we got this far, we need to build an exp.  */
        exp = build_call_array_loc (loc, type, fn, n, argarray);
        ret = fold_builtin_varargs (loc, fndecl, exp, false);
        return ret ? ret : exp;
      }
  }

  /* Not a builtin (or not an ADDR_EXPR callee): build the call as-is.  */
  return build_call_array_loc (loc, type, fn, n, argarray);
}
10681
 
10682
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
10683
   along with N new arguments specified as the "..." parameters.  SKIP
10684
   is the number of arguments in EXP to be omitted.  This function is used
10685
   to do varargs-to-varargs transformations.  */
10686
 
10687
static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  int oldnargs = call_expr_nargs (exp);
  int nargs = oldnargs - skip + n;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  tree *buffer;

  if (n > 0)
    {
      int i, j;
      va_list ap;

      /* New arguments come first, followed by EXP's arguments after the
         first SKIP of them.  */
      buffer = XALLOCAVEC (tree, nargs);
      va_start (ap, n);
      for (i = 0; i < n; i++)
        buffer[i] = va_arg (ap, tree);
      va_end (ap);
      for (j = skip; j < oldnargs; j++, i++)
        buffer[i] = CALL_EXPR_ARG (exp, j);
    }
  else
    /* No new arguments: point directly into EXP's argument array rather
       than copying it.  */
    buffer = CALL_EXPR_ARGP (exp) + skip;

  return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
}
10714
 
10715
/* Validate a single argument ARG against a tree code CODE representing
10716
   a type.  */
10717
 
10718
static bool
10719
validate_arg (const_tree arg, enum tree_code code)
10720
{
10721
  if (!arg)
10722
    return false;
10723
  else if (code == POINTER_TYPE)
10724
    return POINTER_TYPE_P (TREE_TYPE (arg));
10725
  else if (code == INTEGER_TYPE)
10726
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10727
  return code == TREE_CODE (TREE_TYPE (arg));
10728
}
10729
 
10730
/* This function validates the types of a function call argument list
10731
   against a specified list of tree_codes.  If the last specifier is a 0,
10732
   that represents an ellipses, otherwise the last specifier must be a
10733
   VOID_TYPE.
10734
 
10735
   This is the GIMPLE version of validate_arglist.  Eventually we want to
10736
   completely convert builtins.c to work from GIMPLEs and the tree based
10737
   validate_arglist will then be removed.  */
10738
 
10739
bool
10740
validate_gimple_arglist (const_gimple call, ...)
10741
{
10742
  enum tree_code code;
10743
  bool res = 0;
10744
  va_list ap;
10745
  const_tree arg;
10746
  size_t i;
10747
 
10748
  va_start (ap, call);
10749
  i = 0;
10750
 
10751
  do
10752
    {
10753
      code = (enum tree_code) va_arg (ap, int);
10754
      switch (code)
10755
        {
10756
        case 0:
10757
          /* This signifies an ellipses, any further arguments are all ok.  */
10758
          res = true;
10759
          goto end;
10760
        case VOID_TYPE:
10761
          /* This signifies an endlink, if no arguments remain, return
10762
             true, otherwise return false.  */
10763
          res = (i == gimple_call_num_args (call));
10764
          goto end;
10765
        default:
10766
          /* If no parameters remain or the parameter's code does not
10767
             match the specified code, return false.  Otherwise continue
10768
             checking any remaining arguments.  */
10769
          arg = gimple_call_arg (call, i++);
10770
          if (!validate_arg (arg, code))
10771
            goto end;
10772
          break;
10773
        }
10774
    }
10775
  while (1);
10776
 
10777
  /* We need gotos here since we can only have one VA_CLOSE in a
10778
     function.  */
10779
 end: ;
10780
  va_end (ap);
10781
 
10782
  return res;
10783
}
10784
 
10785
/* This function validates the types of a function call argument list
10786
   against a specified list of tree_codes.  If the last specifier is a 0,
10787
   that represents an ellipses, otherwise the last specifier must be a
10788
   VOID_TYPE.  */
10789
 
10790
bool
10791
validate_arglist (const_tree callexpr, ...)
10792
{
10793
  enum tree_code code;
10794
  bool res = 0;
10795
  va_list ap;
10796
  const_call_expr_arg_iterator iter;
10797
  const_tree arg;
10798
 
10799
  va_start (ap, callexpr);
10800
  init_const_call_expr_arg_iterator (callexpr, &iter);
10801
 
10802
  do
10803
    {
10804
      code = (enum tree_code) va_arg (ap, int);
10805
      switch (code)
10806
        {
10807
        case 0:
10808
          /* This signifies an ellipses, any further arguments are all ok.  */
10809
          res = true;
10810
          goto end;
10811
        case VOID_TYPE:
10812
          /* This signifies an endlink, if no arguments remain, return
10813
             true, otherwise return false.  */
10814
          res = !more_const_call_expr_args_p (&iter);
10815
          goto end;
10816
        default:
10817
          /* If no parameters remain or the parameter's code does not
10818
             match the specified code, return false.  Otherwise continue
10819
             checking any remaining arguments.  */
10820
          arg = next_const_call_expr_arg (&iter);
10821
          if (!validate_arg (arg, code))
10822
            goto end;
10823
          break;
10824
        }
10825
    }
10826
  while (1);
10827
 
10828
  /* We need gotos here since we can only have one VA_CLOSE in a
10829
     function.  */
10830
 end: ;
10831
  va_end (ap);
10832
 
10833
  return res;
10834
}
10835
 
10836
/* Default target-specific builtin expander that does nothing.  */
10837
 
10838
/* Default implementation of the target expand_builtin hook: expand
   nothing, letting the generic code handle (or reject) the builtin.  */
rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
                        rtx target ATTRIBUTE_UNUSED,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        enum machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
10847
 
10848
/* Returns true is EXP represents data that would potentially reside
10849
   in a readonly section.  */
10850
 
10851
static bool
10852
readonly_data_expr (tree exp)
10853
{
10854
  STRIP_NOPS (exp);
10855
 
10856
  if (TREE_CODE (exp) != ADDR_EXPR)
10857
    return false;
10858
 
10859
  exp = get_base_address (TREE_OPERAND (exp, 0));
10860
  if (!exp)
10861
    return false;
10862
 
10863
  /* Make sure we call decl_readonly_section only for trees it
10864
     can handle (since it returns true for everything it doesn't
10865
     understand).  */
10866
  if (TREE_CODE (exp) == STRING_CST
10867
      || TREE_CODE (exp) == CONSTRUCTOR
10868
      || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10869
    return decl_readonly_section (exp, 0);
10870
  else
10871
    return false;
10872
}
10873
 
10874
/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
10875
   to the call, and TYPE is its return type.
10876
 
10877
   Return NULL_TREE if no simplification was possible, otherwise return the
10878
   simplified form of the call as a tree.
10879
 
10880
   The simplified form may be a constant or other expression which
10881
   computes the same value, but in a more efficient manner (including
10882
   calls to other builtin functions).
10883
 
10884
   The call may contain arguments which need to be evaluated, but
10885
   which are not useful to determine the result of the call.  In
10886
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
10887
   COMPOUND_EXPR will be an argument which must be evaluated.
10888
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
10889
   COMPOUND_EXPR in the chain will contain the tree for the simplified
10890
   form of the builtin function call.  */
10891
 
10892
static tree
10893
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10894
{
10895
  if (!validate_arg (s1, POINTER_TYPE)
10896
      || !validate_arg (s2, POINTER_TYPE))
10897
    return NULL_TREE;
10898
  else
10899
    {
10900
      tree fn;
10901
      const char *p1, *p2;
10902
 
10903
      p2 = c_getstr (s2);
10904
      if (p2 == NULL)
10905
        return NULL_TREE;
10906
 
10907
      p1 = c_getstr (s1);
10908
      if (p1 != NULL)
10909
        {
10910
          const char *r = strstr (p1, p2);
10911
          tree tem;
10912
 
10913
          if (r == NULL)
10914
            return build_int_cst (TREE_TYPE (s1), 0);
10915
 
10916
          /* Return an offset into the constant string argument.  */
10917
          tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10918
                             s1, size_int (r - p1));
10919
          return fold_convert_loc (loc, type, tem);
10920
        }
10921
 
10922
      /* The argument is const char *, and the result is char *, so we need
10923
         a type conversion here to avoid a warning.  */
10924
      if (p2[0] == '\0')
10925
        return fold_convert_loc (loc, type, s1);
10926
 
10927
      if (p2[1] != '\0')
10928
        return NULL_TREE;
10929
 
10930
      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10931
      if (!fn)
10932
        return NULL_TREE;
10933
 
10934
      /* New argument list transforming strstr(s1, s2) to
10935
         strchr(s1, s2[0]).  */
10936
      return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10937
    }
10938
}
10939
 
10940
/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
10941
   the call, and TYPE is its return type.
10942
 
10943
   Return NULL_TREE if no simplification was possible, otherwise return the
10944
   simplified form of the call as a tree.
10945
 
10946
   The simplified form may be a constant or other expression which
10947
   computes the same value, but in a more efficient manner (including
10948
   calls to other builtin functions).
10949
 
10950
   The call may contain arguments which need to be evaluated, but
10951
   which are not useful to determine the result of the call.  In
10952
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
10953
   COMPOUND_EXPR will be an argument which must be evaluated.
10954
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
10955
   COMPOUND_EXPR in the chain will contain the tree for the simplified
10956
   form of the builtin function call.  */
10957
 
10958
static tree
10959
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10960
{
10961
  if (!validate_arg (s1, POINTER_TYPE)
10962
      || !validate_arg (s2, INTEGER_TYPE))
10963
    return NULL_TREE;
10964
  else
10965
    {
10966
      const char *p1;
10967
 
10968
      if (TREE_CODE (s2) != INTEGER_CST)
10969
        return NULL_TREE;
10970
 
10971
      p1 = c_getstr (s1);
10972
      if (p1 != NULL)
10973
        {
10974
          char c;
10975
          const char *r;
10976
          tree tem;
10977
 
10978
          if (target_char_cast (s2, &c))
10979
            return NULL_TREE;
10980
 
10981
          r = strchr (p1, c);
10982
 
10983
          if (r == NULL)
10984
            return build_int_cst (TREE_TYPE (s1), 0);
10985
 
10986
          /* Return an offset into the constant string argument.  */
10987
          tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10988
                             s1, size_int (r - p1));
10989
          return fold_convert_loc (loc, type, tem);
10990
        }
10991
      return NULL_TREE;
10992
    }
10993
}
10994
 
10995
/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      /* The character searched for must be a compile-time constant.  */
      if (TREE_CODE (s2) != INTEGER_CST)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          char c;
          const char *r;
          tree tem;

          /* Convert S2 to a host character; give up if that fails.  */
          if (target_char_cast (s2, &c))
            return NULL_TREE;

          /* Both operands constant: do the search at compile time.  */
          r = strrchr (p1, c);

          /* Not found: the result is a null pointer.  */
          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
                             s1, size_int (r - p1));
          return fold_convert_loc (loc, type, tem);
        }

      /* With a non-constant string we can only simplify a search for
         the terminating '\0', which strchr handles equally well.  */
      if (! integer_zerop (s2))
        return NULL_TREE;

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
        return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}

/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
11061
   to the call, and TYPE is its return type.
11062
 
11063
   Return NULL_TREE if no simplification was possible, otherwise return the
11064
   simplified form of the call as a tree.
11065
 
11066
   The simplified form may be a constant or other expression which
11067
   computes the same value, but in a more efficient manner (including
11068
   calls to other builtin functions).
11069
 
11070
   The call may contain arguments which need to be evaluated, but
11071
   which are not useful to determine the result of the call.  In
11072
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
11073
   COMPOUND_EXPR will be an argument which must be evaluated.
11074
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
11075
   COMPOUND_EXPR in the chain will contain the tree for the simplified
11076
   form of the builtin function call.  */
11077
 
11078
static tree
11079
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11080
{
11081
  if (!validate_arg (s1, POINTER_TYPE)
11082
      || !validate_arg (s2, POINTER_TYPE))
11083
    return NULL_TREE;
11084
  else
11085
    {
11086
      tree fn;
11087
      const char *p1, *p2;
11088
 
11089
      p2 = c_getstr (s2);
11090
      if (p2 == NULL)
11091
        return NULL_TREE;
11092
 
11093
      p1 = c_getstr (s1);
11094
      if (p1 != NULL)
11095
        {
11096
          const char *r = strpbrk (p1, p2);
11097
          tree tem;
11098
 
11099
          if (r == NULL)
11100
            return build_int_cst (TREE_TYPE (s1), 0);
11101
 
11102
          /* Return an offset into the constant string argument.  */
11103
          tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11104
                             s1, size_int (r - p1));
11105
          return fold_convert_loc (loc, type, tem);
11106
        }
11107
 
11108
      if (p2[0] == '\0')
11109
        /* strpbrk(x, "") == NULL.
11110
           Evaluate and ignore s1 in case it had side-effects.  */
11111
        return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11112
 
11113
      if (p2[1] != '\0')
11114
        return NULL_TREE;  /* Really call strpbrk.  */
11115
 
11116
      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11117
      if (!fn)
11118
        return NULL_TREE;
11119
 
11120
      /* New argument list transforming strpbrk(s1, s2) to
11121
         strchr(s1, s2[0]).  */
11122
      return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11123
    }
11124
}
11125
 
11126
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
11127
   to the call.
11128
 
11129
   Return NULL_TREE if no simplification was possible, otherwise return the
11130
   simplified form of the call as a tree.
11131
 
11132
   The simplified form may be a constant or other expression which
11133
   computes the same value, but in a more efficient manner (including
11134
   calls to other builtin functions).
11135
 
11136
   The call may contain arguments which need to be evaluated, but
11137
   which are not useful to determine the result of the call.  In
11138
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
11139
   COMPOUND_EXPR will be an argument which must be evaluated.
11140
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
11141
   COMPOUND_EXPR in the chain will contain the tree for the simplified
11142
   form of the builtin function call.  */
11143
 
11144
static tree
11145
fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11146
{
11147
  if (!validate_arg (dst, POINTER_TYPE)
11148
      || !validate_arg (src, POINTER_TYPE))
11149
    return NULL_TREE;
11150
  else
11151
    {
11152
      const char *p = c_getstr (src);
11153
 
11154
      /* If the string length is zero, return the dst parameter.  */
11155
      if (p && *p == '\0')
11156
        return dst;
11157
 
11158
      if (optimize_insn_for_speed_p ())
11159
        {
11160
          /* See if we can store by pieces into (dst + strlen(dst)).  */
11161
          tree newdst, call;
11162
          tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11163
          tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11164
 
11165
          if (!strlen_fn || !strcpy_fn)
11166
            return NULL_TREE;
11167
 
11168
          /* If we don't have a movstr we don't want to emit an strcpy
11169
             call.  We have to do that if the length of the source string
11170
             isn't computable (in that case we can use memcpy probably
11171
             later expanding to a sequence of mov instructions).  If we
11172
             have movstr instructions we can emit strcpy calls.  */
11173
          if (!HAVE_movstr)
11174
            {
11175
              tree len = c_strlen (src, 1);
11176
              if (! len || TREE_SIDE_EFFECTS (len))
11177
                return NULL_TREE;
11178
            }
11179
 
11180
          /* Stabilize the argument list.  */
11181
          dst = builtin_save_expr (dst);
11182
 
11183
          /* Create strlen (dst).  */
11184
          newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11185
          /* Create (dst p+ strlen (dst)).  */
11186
 
11187
          newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11188
                                TREE_TYPE (dst), dst, newdst);
11189
          newdst = builtin_save_expr (newdst);
11190
 
11191
          call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11192
          return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11193
        }
11194
      return NULL_TREE;
11195
    }
11196
}
11197
 
11198
/* Simplify a call to the strncat builtin.  DST, SRC, and LEN are the
11199
   arguments to the call.
11200
 
11201
   Return NULL_TREE if no simplification was possible, otherwise return the
11202
   simplified form of the call as a tree.
11203
 
11204
   The simplified form may be a constant or other expression which
11205
   computes the same value, but in a more efficient manner (including
11206
   calls to other builtin functions).
11207
 
11208
   The call may contain arguments which need to be evaluated, but
11209
   which are not useful to determine the result of the call.  In
11210
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
11211
   COMPOUND_EXPR will be an argument which must be evaluated.
11212
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
11213
   COMPOUND_EXPR in the chain will contain the tree for the simplified
11214
   form of the builtin function call.  */
11215
 
11216
static tree
11217
fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11218
{
11219
  if (!validate_arg (dst, POINTER_TYPE)
11220
      || !validate_arg (src, POINTER_TYPE)
11221
      || !validate_arg (len, INTEGER_TYPE))
11222
    return NULL_TREE;
11223
  else
11224
    {
11225
      const char *p = c_getstr (src);
11226
 
11227
      /* If the requested length is zero, or the src parameter string
11228
         length is zero, return the dst parameter.  */
11229
      if (integer_zerop (len) || (p && *p == '\0'))
11230
        return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11231
 
11232
      /* If the requested len is greater than or equal to the string
11233
         length, call strcat.  */
11234
      if (TREE_CODE (len) == INTEGER_CST && p
11235
          && compare_tree_int (len, strlen (p)) >= 0)
11236
        {
11237
          tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11238
 
11239
          /* If the replacement _DECL isn't initialized, don't do the
11240
             transformation.  */
11241
          if (!fn)
11242
            return NULL_TREE;
11243
 
11244
          return build_call_expr_loc (loc, fn, 2, dst, src);
11245
        }
11246
      return NULL_TREE;
11247
    }
11248
}
11249
 
11250
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
11251
   to the call.
11252
 
11253
   Return NULL_TREE if no simplification was possible, otherwise return the
11254
   simplified form of the call as a tree.
11255
 
11256
   The simplified form may be a constant or other expression which
11257
   computes the same value, but in a more efficient manner (including
11258
   calls to other builtin functions).
11259
 
11260
   The call may contain arguments which need to be evaluated, but
11261
   which are not useful to determine the result of the call.  In
11262
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
11263
   COMPOUND_EXPR will be an argument which must be evaluated.
11264
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
11265
   COMPOUND_EXPR in the chain will contain the tree for the simplified
11266
   form of the builtin function call.  */
11267
 
11268
static tree
11269
fold_builtin_strspn (location_t loc, tree s1, tree s2)
11270
{
11271
  if (!validate_arg (s1, POINTER_TYPE)
11272
      || !validate_arg (s2, POINTER_TYPE))
11273
    return NULL_TREE;
11274
  else
11275
    {
11276
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11277
 
11278
      /* If both arguments are constants, evaluate at compile-time.  */
11279
      if (p1 && p2)
11280
        {
11281
          const size_t r = strspn (p1, p2);
11282
          return size_int (r);
11283
        }
11284
 
11285
      /* If either argument is "", return NULL_TREE.  */
11286
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11287
        /* Evaluate and ignore both arguments in case either one has
11288
           side-effects.  */
11289
        return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11290
                                  s1, s2);
11291
      return NULL_TREE;
11292
    }
11293
}
11294
 
11295
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      /* c_getstr yields the constant string behind an argument, or
         NULL when the argument is not a constant string.  */
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
        {
          const size_t r = strcspn (p1, p2);
          return size_int (r);
        }

      /* If the first argument is "", the result is 0.  */
      if (p1 && *p1 == '\0')
        {
          /* Evaluate and ignore argument s2 in case it has
             side-effects.  */
          return omit_one_operand_loc (loc, size_type_node,
                                   size_zero_node, s2);
        }

      /* If the second argument is "", return __builtin_strlen(s1).  */
      if (p2 && *p2 == '\0')
        {
          tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];

          /* If the replacement _DECL isn't initialized, don't do the
             transformation.  */
          if (!fn)
            return NULL_TREE;

          return build_call_expr_loc (loc, fn, 1, s1);
        }
      return NULL_TREE;
    }
}

/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
11356
   to the call.  IGNORE is true if the value returned
11357
   by the builtin will be ignored.  UNLOCKED is true is true if this
11358
   actually a call to fputs_unlocked.  If LEN in non-NULL, it represents
11359
   the known length of the string.  Return NULL_TREE if no simplification
11360
   was possible.  */
11361
 
11362
tree
11363
fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11364
                    bool ignore, bool unlocked, tree len)
11365
{
11366
  /* If we're using an unlocked function, assume the other unlocked
11367
     functions exist explicitly.  */
11368
  tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11369
    : implicit_built_in_decls[BUILT_IN_FPUTC];
11370
  tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11371
    : implicit_built_in_decls[BUILT_IN_FWRITE];
11372
 
11373
  /* If the return value is used, don't do the transformation.  */
11374
  if (!ignore)
11375
    return NULL_TREE;
11376
 
11377
  /* Verify the arguments in the original call.  */
11378
  if (!validate_arg (arg0, POINTER_TYPE)
11379
      || !validate_arg (arg1, POINTER_TYPE))
11380
    return NULL_TREE;
11381
 
11382
  if (! len)
11383
    len = c_strlen (arg0, 0);
11384
 
11385
  /* Get the length of the string passed to fputs.  If the length
11386
     can't be determined, punt.  */
11387
  if (!len
11388
      || TREE_CODE (len) != INTEGER_CST)
11389
    return NULL_TREE;
11390
 
11391
  switch (compare_tree_int (len, 1))
11392
    {
11393
    case -1: /* length is 0, delete the call entirely .  */
11394
      return omit_one_operand_loc (loc, integer_type_node,
11395
                               integer_zero_node, arg1);;
11396
 
11397
    case 0: /* length is 1, call fputc.  */
11398
      {
11399
        const char *p = c_getstr (arg0);
11400
 
11401
        if (p != NULL)
11402
          {
11403
            if (fn_fputc)
11404
              return build_call_expr_loc (loc, fn_fputc, 2,
11405
                                      build_int_cst (NULL_TREE, p[0]), arg1);
11406
            else
11407
              return NULL_TREE;
11408
          }
11409
      }
11410
      /* FALLTHROUGH */
11411
    case 1: /* length is greater than 1, call fwrite.  */
11412
      {
11413
        /* If optimizing for size keep fputs.  */
11414
        if (optimize_function_for_size_p (cfun))
11415
          return NULL_TREE;
11416
        /* New argument list transforming fputs(string, stream) to
11417
           fwrite(string, 1, len, stream).  */
11418
        if (fn_fwrite)
11419
          return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11420
                                  size_one_node, len, arg1);
11421
        else
11422
          return NULL_TREE;
11423
      }
11424
    default:
11425
      gcc_unreachable ();
11426
    }
11427
  return NULL_TREE;
11428
}
11429
 
11430
/* Fold the next_arg or va_start call EXP. Returns true if there was an error
11431
   produced.  False otherwise.  This is done so that we don't output the error
11432
   or warning twice or three times.  */
11433
 
11434
bool
11435
fold_builtin_next_arg (tree exp, bool va_start_p)
11436
{
11437
  tree fntype = TREE_TYPE (current_function_decl);
11438
  int nargs = call_expr_nargs (exp);
11439
  tree arg;
11440
 
11441
  if (TYPE_ARG_TYPES (fntype) == 0
11442
      || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11443
          == void_type_node))
11444
    {
11445
      error ("%<va_start%> used in function with fixed args");
11446
      return true;
11447
    }
11448
 
11449
  if (va_start_p)
11450
    {
11451
      if (va_start_p && (nargs != 2))
11452
        {
11453
          error ("wrong number of arguments to function %<va_start%>");
11454
          return true;
11455
        }
11456
      arg = CALL_EXPR_ARG (exp, 1);
11457
    }
11458
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11459
     when we checked the arguments and if needed issued a warning.  */
11460
  else
11461
    {
11462
      if (nargs == 0)
11463
        {
11464
          /* Evidently an out of date version of <stdarg.h>; can't validate
11465
             va_start's second argument, but can still work as intended.  */
11466
          warning (0, "%<__builtin_next_arg%> called without an argument");
11467
          return true;
11468
        }
11469
      else if (nargs > 1)
11470
        {
11471
          error ("wrong number of arguments to function %<__builtin_next_arg%>");
11472
          return true;
11473
        }
11474
      arg = CALL_EXPR_ARG (exp, 0);
11475
    }
11476
 
11477
  if (TREE_CODE (arg) == SSA_NAME)
11478
    arg = SSA_NAME_VAR (arg);
11479
 
11480
  /* We destructively modify the call to be __builtin_va_start (ap, 0)
11481
     or __builtin_next_arg (0) the first time we see it, after checking
11482
     the arguments and if needed issuing a warning.  */
11483
  if (!integer_zerop (arg))
11484
    {
11485
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11486
 
11487
      /* Strip off all nops for the sake of the comparison.  This
11488
         is not quite the same as STRIP_NOPS.  It does more.
11489
         We must also strip off INDIRECT_EXPR for C++ reference
11490
         parameters.  */
11491
      while (CONVERT_EXPR_P (arg)
11492
             || TREE_CODE (arg) == INDIRECT_REF)
11493
        arg = TREE_OPERAND (arg, 0);
11494
      if (arg != last_parm)
11495
        {
11496
          /* FIXME: Sometimes with the tree optimizers we can get the
11497
             not the last argument even though the user used the last
11498
             argument.  We just warn and set the arg to be the last
11499
             argument so that we will get wrong-code because of
11500
             it.  */
11501
          warning (0, "second parameter of %<va_start%> not last named argument");
11502
        }
11503
 
11504
      /* Undefined by C99 7.15.1.4p4 (va_start):
11505
         "If the parameter parmN is declared with the register storage
11506
         class, with a function or array type, or with a type that is
11507
         not compatible with the type that results after application of
11508
         the default argument promotions, the behavior is undefined."
11509
      */
11510
      else if (DECL_REGISTER (arg))
11511
        warning (0, "undefined behaviour when second parameter of "
11512
                 "%<va_start%> is declared with %<register%> storage");
11513
 
11514
      /* We want to verify the second parameter just once before the tree
11515
         optimizers are run and then avoid keeping it in the tree,
11516
         as otherwise we could warn even for correct code like:
11517
         void foo (int i, ...)
11518
         { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
11519
      if (va_start_p)
11520
        CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11521
      else
11522
        CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11523
    }
11524
  return false;
11525
}
11526
 
11527
 
11528
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  If IGNORED is true, it means that
   the caller does not use the returned value of the function.  */

static tree
fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
                      tree orig, int ignored)
{
  tree call, retval;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;
  if (orig && !validate_arg (orig, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  /* init_target_chars sets up target_percent / target_percent_s; if it
     fails we cannot interpret the format string.  */
  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      if (!fn)
        return NULL_TREE;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
        return NULL_TREE;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
         'format' is known to contain no % formats.  */
      call = build_call_expr_loc (loc, fn, 2, dest, fmt);
      /* sprintf returns the number of characters written; here that is
         simply the length of the constant format string.  */
      if (!ignored)
        retval = build_int_cst (NULL_TREE, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      if (!fn)
        return NULL_TREE;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
        return NULL_TREE;

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      if (!ignored)
        {
          /* The return value is the length of ORIG; it must be a
             compile-time constant for the transformation to be valid.  */
          retval = c_strlen (orig, 1);
          if (!retval || TREE_CODE (retval) != INTEGER_CST)
            return NULL_TREE;
        }
      call = build_call_expr_loc (loc, fn, 2, dest, orig);
    }

  /* When the caller uses the result, chain the strcpy call with the
     computed return value via a COMPOUND_EXPR.  */
  if (call && retval)
    {
      retval = fold_convert_loc
        (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
         retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}

/* Expand a call EXP to __builtin_object_size.  */
11617
 
11618
rtx
11619
expand_builtin_object_size (tree exp)
11620
{
11621
  tree ost;
11622
  int object_size_type;
11623
  tree fndecl = get_callee_fndecl (exp);
11624
 
11625
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11626
    {
11627
      error ("%Kfirst argument of %D must be a pointer, second integer constant",
11628
             exp, fndecl);
11629
      expand_builtin_trap ();
11630
      return const0_rtx;
11631
    }
11632
 
11633
  ost = CALL_EXPR_ARG (exp, 1);
11634
  STRIP_NOPS (ost);
11635
 
11636
  if (TREE_CODE (ost) != INTEGER_CST
11637
      || tree_int_cst_sgn (ost) < 0
11638
      || compare_tree_int (ost, 3) > 0)
11639
    {
11640
      error ("%Klast argument of %D is not integer constant between 0 and 3",
11641
             exp, fndecl);
11642
      expand_builtin_trap ();
11643
      return const0_rtx;
11644
    }
11645
 
11646
  object_size_type = tree_low_cst (ost, 0);
11647
 
11648
  return object_size_type < 2 ? constm1_rtx : const0_rtx;
11649
}
11650
 
11651
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11652
   FCODE is the BUILT_IN_* to use.
11653
   Return NULL_RTX if we failed; the caller should emit a normal call,
11654
   otherwise try to get the result in TARGET, if convenient (and in
11655
   mode MODE if that's convenient).  */
11656
 
11657
static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
                           enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* The _chk variants take (dest, src-or-char, len, objsize); for
     __memset_chk the second argument is the fill character.  */
  if (!validate_arglist (exp,
                         POINTER_TYPE,
                         fcode == BUILT_IN_MEMSET_CHK
                         ? INTEGER_TYPE : POINTER_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* Without a constant object size we cannot reason about overflow;
     emit the library call.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  /* Either LEN is a known constant, or SIZE is (size_t)-1 meaning the
     object size is unknown and no checking is wanted.  */
  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      /* Constant LEN larger than constant SIZE: certain overflow.  Warn
         and keep the checking call so it fails at run time.  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
        {
          warning_at (tree_nonartificial_location (exp),
                      0, "%Kcall to %D will always overflow destination buffer",
                      exp, get_callee_fndecl (exp));
          return NULL_RTX;
        }

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
         mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
        {
        case BUILT_IN_MEMCPY_CHK:
          fn = built_in_decls[BUILT_IN_MEMCPY];
          break;
        case BUILT_IN_MEMPCPY_CHK:
          fn = built_in_decls[BUILT_IN_MEMPCPY];
          break;
        case BUILT_IN_MEMMOVE_CHK:
          fn = built_in_decls[BUILT_IN_MEMMOVE];
          break;
        case BUILT_IN_MEMSET_CHK:
          fn = built_in_decls[BUILT_IN_MEMSET];
          break;
        default:
          break;
        }

      if (! fn)
        return NULL_RTX;

      /* The check is statically known to pass: expand the plain,
         unchecked function instead, preserving tail-call status.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align
        = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
        return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
        {
          tree expr;

          if (fcode != BUILT_IN_MEMPCPY_CHK)
            {
              /* Evaluate and ignore LEN in case it has side-effects.  */
              expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
              return expand_expr (dest, target, mode, EXPAND_NORMAL);
            }

          /* __mempcpy_chk returns DEST + LEN rather than DEST.  */
          expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
          return expand_expr (expr, target, mode, EXPAND_NORMAL);
        }

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
        {
          unsigned int src_align
            = get_pointer_alignment (src, BIGGEST_ALIGNMENT);

          if (src_align == 0)
            return NULL_RTX;

          /* If src is categorized for a readonly section we can use
             normal __memcpy_chk.  */
          if (readonly_data_expr (src))
            {
              tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
              if (!fn)
                return NULL_RTX;
              fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
                                          dest, src, len, size);
              gcc_assert (TREE_CODE (fn) == CALL_EXPR);
              CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
              return expand_expr (fn, target, mode, EXPAND_NORMAL);
            }
        }
      return NULL_RTX;
    }
}
11772
 
11773
/* Emit warning if a buffer overflow is detected at compile time.  */
11774
 
11775
static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* Nonzero when LEN is a source string whose length must be computed
     with c_strlen rather than being an explicit byte count.  */
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  /* Pick out the "amount written" and "object size" arguments, whose
     positions differ between the _chk builtins.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* SIZE of (size_t) -1 means the object size is unknown; nothing to
     check against.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* Warn only when the source string length (plus NUL) provably
         exceeds the destination size.  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
        return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      /* strncat writes at most min (LEN, strlen (SRC)) + 1 bytes, so a
         LEN smaller than SIZE can never be proven to overflow here.  */
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
        return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
        {
          /* Source length unknown: overflow is possible, not certain.  */
          warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
                      exp, get_callee_fndecl (exp));
          return;
        }
      else if (tree_int_cst_lt (src, size))
        return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
              exp, get_callee_fndecl (exp));
}
11840
 
11841
/* Emit warning if a buffer overflow is detected at compile time
11842
   in __sprintf_chk/__vsprintf_chk calls.  */
11843
 
11844
static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  /* __{,v}sprintf_chk is (dest, flag, objsize, fmt, ...).  */
  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* (size_t) -1 means the object size is unknown; nothing to check.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
           && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
        return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
        return;

      len = c_strlen (arg, 1);
      if (!len || ! host_integerp (len, 1))
        return;
    }
  else
    return;

  /* LEN excludes the terminating NUL, so overflow is certain whenever
     LEN >= SIZE.  */
  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
                0, "%Kcall to %D will always overflow destination buffer",
                exp, get_callee_fndecl (exp));
}
11897
 
11898
/* Emit warning if a free is called with address of a variable.  */
11899
 
11900
static void
11901
maybe_emit_free_warning (tree exp)
11902
{
11903
  tree arg = CALL_EXPR_ARG (exp, 0);
11904
 
11905
  STRIP_NOPS (arg);
11906
  if (TREE_CODE (arg) != ADDR_EXPR)
11907
    return;
11908
 
11909
  arg = get_base_address (TREE_OPERAND (arg, 0));
11910
  if (arg == NULL || INDIRECT_REF_P (arg))
11911
    return;
11912
 
11913
  if (SSA_VAR_P (arg))
11914
    warning_at (tree_nonartificial_location (exp),
11915
                0, "%Kattempt to free a non-heap object %qD", exp, arg);
11916
  else
11917
    warning_at (tree_nonartificial_location (exp),
11918
                0, "%Kattempt to free a non-heap object", exp);
11919
}
11920
 
11921
/* Fold a call to __builtin_object_size with arguments PTR and OST,
11922
   if possible.  */
11923
 
11924
tree
fold_builtin_object_size (tree ptr, tree ost)
{
  tree ret = NULL_TREE;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The second argument must be a literal constant in [0, 3]; other
     values are diagnosed later at expansion time.  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    ret = build_int_cstu (size_type_node,
                          compute_builtin_object_size (ptr, object_size_type));

  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      unsigned HOST_WIDE_INT bytes;

      /* If object size is not known yet, delay folding until
       later.  Maybe subsequent passes will help determining
       it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
                                             ? -1 : 0))
        ret = build_int_cstu (size_type_node, bytes);
    }

  /* Discard the result if it does not fit in size_t; the caller will
     then leave the call unfolded.  */
  if (ret)
    {
      unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
      HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
      if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
        ret = NULL_TREE;
    }

  return ret;
}
11976
 
11977
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11978
   DEST, SRC, LEN, and SIZE are the arguments to the call.
11979
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
11980
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
11981
   passed as third argument.  */
11982
 
11983
tree
fold_builtin_memory_chk (location_t loc, tree fndecl,
                         tree dest, tree src, tree len, tree size,
                         tree maxlen, bool ignore,
                         enum built_in_function fcode)
{
  tree fn;

  /* For __memset_chk the second argument is the fill character, not a
     pointer.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
                        (fcode == BUILT_IN_MEMSET_CHK
                         ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
        return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
                                 dest, len);
      else
        {
          tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
                                   dest, len);
          return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
        }
    }

  /* A non-constant object size gives us nothing to check against.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE of (size_t) -1 means checking was disabled; otherwise prove
     LEN (or its upper bound MAXLEN) fits in SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
            {
              if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
                {
                  /* (void) __mempcpy_chk () can be optimized into
                     (void) __memcpy_chk ().  */
                  fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
                  if (!fn)
                    return NULL_TREE;

                  return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
                }
              return NULL_TREE;
            }
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMCPY];
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMPCPY];
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = built_in_decls[BUILT_IN_MEMMOVE];
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = built_in_decls[BUILT_IN_MEMSET];
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  /* The check is statically known to pass: fold to the plain call.  */
  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
12072
 
12073
/* Fold a call to the __st[rp]cpy_chk builtin.
12074
   DEST, SRC, and SIZE are the arguments to the call.
12075
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
12076
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
12077
   strings passed as second argument.  */
12078
 
12079
tree
fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
                         tree src, tree size,
                         tree maxlen, bool ignore,
                         enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* A non-constant object size gives us nothing to check against.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE of (size_t) -1 disables checking; otherwise prove that the
     source string (plus NUL) fits in SIZE bytes.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
            {
              if (fcode == BUILT_IN_STPCPY_CHK)
                {
                  if (! ignore)
                    return NULL_TREE;

                  /* If return value of __stpcpy_chk is ignored,
                     optimize into __strcpy_chk.  */
                  fn = built_in_decls[BUILT_IN_STRCPY_CHK];
                  if (!fn)
                    return NULL_TREE;

                  return build_call_expr_loc (loc, fn, 3, dest, src, size);
                }

              if (! len || TREE_SIDE_EFFECTS (len))
                return NULL_TREE;

              /* If c_strlen returned something, but not a constant,
                 transform __strcpy_chk into __memcpy_chk.  */
              fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
              if (!fn)
                return NULL_TREE;

              /* Copy strlen (SRC) + 1 bytes to include the NUL.  */
              len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
              return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
                                       build_call_expr_loc (loc, fn, 4,
                                                        dest, src, len, size));
            }
        }
      else
        maxlen = len;

      /* MAXLEN excludes the NUL, so require MAXLEN < SIZE.  */
      if (! tree_int_cst_lt (maxlen, size))
        return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
                      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 2, dest, src);
}
12153
 
12154
/* Fold a call to the __strncpy_chk builtin.  DEST, SRC, LEN, and SIZE
12155
   are the arguments to the call.  If MAXLEN is not NULL, it is maximum
12156
   length passed as third argument.  */
12157
 
12158
tree
12159
fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12160
                          tree len, tree size, tree maxlen)
12161
{
12162
  tree fn;
12163
 
12164
  if (!validate_arg (dest, POINTER_TYPE)
12165
      || !validate_arg (src, POINTER_TYPE)
12166
      || !validate_arg (len, INTEGER_TYPE)
12167
      || !validate_arg (size, INTEGER_TYPE))
12168
    return NULL_TREE;
12169
 
12170
  if (! host_integerp (size, 1))
12171
    return NULL_TREE;
12172
 
12173
  if (! integer_all_onesp (size))
12174
    {
12175
      if (! host_integerp (len, 1))
12176
        {
12177
          /* If LEN is not constant, try MAXLEN too.
12178
             For MAXLEN only allow optimizing into non-_ocs function
12179
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
12180
          if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12181
            return NULL_TREE;
12182
        }
12183
      else
12184
        maxlen = len;
12185
 
12186
      if (tree_int_cst_lt (size, maxlen))
12187
        return NULL_TREE;
12188
    }
12189
 
12190
  /* If __builtin_strncpy_chk is used, assume strncpy is available.  */
12191
  fn = built_in_decls[BUILT_IN_STRNCPY];
12192
  if (!fn)
12193
    return NULL_TREE;
12194
 
12195
  return build_call_expr_loc (loc, fn, 3, dest, src, len);
12196
}
12197
 
12198
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
12199
   are the arguments to the call.  */
12200
 
12201
static tree
12202
fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12203
                         tree src, tree size)
12204
{
12205
  tree fn;
12206
  const char *p;
12207
 
12208
  if (!validate_arg (dest, POINTER_TYPE)
12209
      || !validate_arg (src, POINTER_TYPE)
12210
      || !validate_arg (size, INTEGER_TYPE))
12211
    return NULL_TREE;
12212
 
12213
  p = c_getstr (src);
12214
  /* If the SRC parameter is "", return DEST.  */
12215
  if (p && *p == '\0')
12216
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12217
 
12218
  if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12219
    return NULL_TREE;
12220
 
12221
  /* If __builtin_strcat_chk is used, assume strcat is available.  */
12222
  fn = built_in_decls[BUILT_IN_STRCAT];
12223
  if (!fn)
12224
    return NULL_TREE;
12225
 
12226
  return build_call_expr_loc (loc, fn, 2, dest, src);
12227
}
12228
 
12229
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12230
   LEN, and SIZE.  */
12231
 
12232
static tree
12233
fold_builtin_strncat_chk (location_t loc, tree fndecl,
12234
                          tree dest, tree src, tree len, tree size)
12235
{
12236
  tree fn;
12237
  const char *p;
12238
 
12239
  if (!validate_arg (dest, POINTER_TYPE)
12240
      || !validate_arg (src, POINTER_TYPE)
12241
      || !validate_arg (size, INTEGER_TYPE)
12242
      || !validate_arg (size, INTEGER_TYPE))
12243
    return NULL_TREE;
12244
 
12245
  p = c_getstr (src);
12246
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
12247
  if (p && *p == '\0')
12248
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12249
  else if (integer_zerop (len))
12250
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12251
 
12252
  if (! host_integerp (size, 1))
12253
    return NULL_TREE;
12254
 
12255
  if (! integer_all_onesp (size))
12256
    {
12257
      tree src_len = c_strlen (src, 1);
12258
      if (src_len
12259
          && host_integerp (src_len, 1)
12260
          && host_integerp (len, 1)
12261
          && ! tree_int_cst_lt (len, src_len))
12262
        {
12263
          /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
12264
          fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12265
          if (!fn)
12266
            return NULL_TREE;
12267
 
12268
          return build_call_expr_loc (loc, fn, 3, dest, src, size);
12269
        }
12270
      return NULL_TREE;
12271
    }
12272
 
12273
  /* If __builtin_strncat_chk is used, assume strncat is available.  */
12274
  fn = built_in_decls[BUILT_IN_STRNCAT];
12275
  if (!fn)
12276
    return NULL_TREE;
12277
 
12278
  return build_call_expr_loc (loc, fn, 3, dest, src, len);
12279
}
12280
 
12281
/* Fold a call EXP to __{,v}sprintf_chk.  Return NULL_TREE if
12282
   a normal call should be emitted rather than expanding the function
12283
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */
12284
 
12285
static tree
fold_builtin_sprintf_chk (location_t loc, tree exp,
                          enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call:
     (dest, flag, objsize, fmt, ...).  */
  if (nargs < 4)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* A non-constant object size gives us nothing to check against.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
        {
          /* For sprintf, a %-free format with extra arguments would be
             suspicious, so only accept it with exactly 4 arguments.  */
          if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
            len = build_int_cstu (size_type_node, strlen (fmt_str));
        }
      /* If the format is "%s" and first ... argument is a string literal,
         we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
               && strcmp (fmt_str, target_percent_s) == 0)
        {
          tree arg;

          if (nargs == 5)
            {
              arg = CALL_EXPR_ARG (exp, 4);
              if (validate_arg (arg, POINTER_TYPE))
                {
                  len = c_strlen (arg, 1);
                  if (! len || ! host_integerp (len, 1))
                    len = NULL_TREE;
                }
            }
        }
    }

  /* Unless checking is disabled (SIZE == (size_t) -1), the computed
     output length plus NUL must provably fit: require LEN < SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
        return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
        return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
                      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and objsize arguments (skip the 4 fixed args, keep
     dest and fmt plus any variadic arguments).  */
  return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
}
12372
 
12373
/* Fold a call EXP to {,v}snprintf.  Return NULL_TREE if
12374
   a normal call should be emitted rather than expanding the function
12375
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
12376
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
12377
   passed as second argument.  */
12378
 
12379
tree
fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
                           enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     (dest, len, flag, objsize, fmt, ...).  */
  if (call_expr_nargs (exp) < 5)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 4);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* A non-constant object size gives us nothing to check against.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE of (size_t) -1 disables checking; otherwise prove that LEN
     (or its upper bound MAXLEN) fits in SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
            return NULL_TREE;
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
        return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
                      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and objsize arguments (skip the 5 fixed args, keep
     dest, len and fmt plus any variadic arguments).  */
  return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
}
12449
 
12450
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
                     tree arg, bool ignore,
                     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  /* Pick the replacement decls.  Either may come back NULL_TREE, in
     which case the corresponding transformation is simply skipped
     below (CALL stays NULL_TREE).  */
  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
      fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
    }
  else
    {
      fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
      fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Handle printf("%s", str) and formats with no '%' at all; both
     reduce to emitting a known string STR.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
        {
          if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
            return NULL_TREE;

          if (!arg || !validate_arg (arg, POINTER_TYPE))
            return NULL_TREE;

          str = c_getstr (arg);
          if (str == NULL)
            return NULL_TREE;
        }
      else
        {
          /* The format specifier doesn't contain any '%' characters.  */
          if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
              && arg)
            return NULL_TREE;
          str = fmt_str;
        }

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
        return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
        {
          /* Given printf("c"), (where c is any one character,)
             convert "c"[0] to an int and pass that to the replacement
             function.  */
          newarg = build_int_cst (NULL_TREE, str[0]);
          if (fn_putchar)
            call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
        }
      else
        {
          /* If the string was "string\n", call puts("string").  */
          size_t len = strlen (str);
          if ((unsigned char)str[len - 1] == target_newline)
            {
              /* Create a NUL-terminated string that's one char shorter
                 than the original, stripping off the trailing '\n'.  */
              char *newstr = XALLOCAVEC (char, len);
              memcpy (newstr, str, len - 1);
              newstr[len - 1] = 0;

              newarg = build_string_literal (len, newstr);
              if (fn_puts)
                call = build_call_expr_loc (loc, fn_puts, 1, newarg);
            }
          else
            /* We'd like to arrange to call fputs(string,stdout) here,
               but we need stdout and don't have a way to get it yet.  */
            return NULL_TREE;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
        return NULL_TREE;
      if (fn_puts)
        call = build_call_expr_loc (loc, fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
        return NULL_TREE;
      if (fn_putchar)
        call = build_call_expr_loc (loc, fn_putchar, 1, arg);
    }

  if (!call)
    return NULL_TREE;

  /* Adjust the replacement call's result to the original return type
     (the result is unused anyway, see the IGNORE check above).  */
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12585
 
12586
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
                      tree fmt, tree arg, bool ignore,
                      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  /* Pick the replacement decls.  Either may come back NULL_TREE, in
     which case the corresponding transformation is skipped below.  */
  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
      fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
    }
  else
    {
      fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
      fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
          && arg)
        return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
        {
          /* If FP has side-effects, just wait until gimplification is
             done.  */
          if (TREE_SIDE_EFFECTS (fp))
            return NULL_TREE;

          return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
        }

      /* When "string" doesn't contain %, replace all cases of
         fprintf (fp, string) with fputs (string, fp).  The fputs
         builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
        call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
        return NULL_TREE;
      if (fn_fputs)
        call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
        return NULL_TREE;
      if (fn_fputc)
        call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
    }

  if (!call)
    return NULL_TREE;
  /* Adjust the replacement call's result to the original return type.  */
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12684
 
12685
/* Initialize format string characters in the target charset.  */
12686
 
12687
static bool
12688
init_target_chars (void)
12689
{
12690
  static bool init;
12691
  if (!init)
12692
    {
12693
      target_newline = lang_hooks.to_target_charset ('\n');
12694
      target_percent = lang_hooks.to_target_charset ('%');
12695
      target_c = lang_hooks.to_target_charset ('c');
12696
      target_s = lang_hooks.to_target_charset ('s');
12697
      if (target_newline == 0 || target_percent == 0 || target_c == 0
12698
          || target_s == 0)
12699
        return false;
12700
 
12701
      target_percent_c[0] = target_percent;
12702
      target_percent_c[1] = target_c;
12703
      target_percent_c[2] = '\0';
12704
 
12705
      target_percent_s[0] = target_percent;
12706
      target_percent_s[1] = target_s;
12707
      target_percent_s[2] = '\0';
12708
 
12709
      target_percent_s_newline[0] = target_percent;
12710
      target_percent_s_newline[1] = target_s;
12711
      target_percent_s_newline[2] = target_newline;
12712
      target_percent_s_newline[3] = '\0';
12713
 
12714
      init = true;
12715
    }
12716
  return true;
12717
}
12718
 
12719
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value, i.e.
             rounding to TYPE's mode lost nothing.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}
12755
 
12756
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  Each check below is
     short-circuited by FORCE_CONVERT.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
          && !mpfr_overflow_p () && !mpfr_underflow_p ()
          && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* TYPE is a complex type; TREE_TYPE (type) is its component
         (real) type.  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (force_convert
          || (real_isfinite (&re) && real_isfinite (&im)
              && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
              && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
          REAL_VALUE_TYPE re_mode, im_mode;

          real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
          real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
          /* Proceed iff the specified mode can hold the value.  */
          if (force_convert
              || (real_identical (&re_mode, &re)
                  && real_identical (&im_mode, &im)))
            return build_complex (type, build_real (TREE_TYPE (type), re_mode),
                                  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}
12802
 
12803
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
              const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
              bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      /* Require a finite argument inside the optional [MIN, MAX]
         (or (MIN, MAX) when !INCLUSIVE) domain.  */
      if (real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
          && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          /* FUNC computes in place: M is both input and output.  */
          inexact = func (m, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
12849
 
12850
/* If arguments ARG1 and ARG2 are REAL_CSTs, call the two-argument mpfr
   function FUNC on them and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m1, m2;

          mpfr_inits2 (prec, m1, m2, NULL);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_from_real (m2, ra2, GMP_RNDN);
          mpfr_clear_flags ();
          /* FUNC computes in place: M1 is reused as the result.  */
          inexact = func (m1, m1, m2, rnd);
          result = do_mpfr_ckconv (m1, type, inexact);
          mpfr_clears (m1, m2, NULL);
        }
    }

  return result;
}
12894
 
12895
/* If arguments ARG1, ARG2 and ARG3 are REAL_CSTs, call the
   three-argument mpfr function FUNC on them and return the resulting
   value as a tree with type TYPE.  The mpfr precision is set to the
   precision of TYPE.  We assume that function FUNC returns zero if
   the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m1, m2, m3;

          mpfr_inits2 (prec, m1, m2, m3, NULL);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_from_real (m2, ra2, GMP_RNDN);
          mpfr_from_real (m3, ra3, GMP_RNDN);
          mpfr_clear_flags ();
          /* FUNC computes in place: M1 is reused as the result.  */
          inexact = func (m1, m1, m2, m3, rnd);
          result = do_mpfr_ckconv (m1, type, inexact);
          mpfr_clears (m1, m2, m3, NULL);
        }
    }

  return result;
}
12943
 
12944
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
          tree result_s, result_c;
          int inexact;
          mpfr_t m, ms, mc;

          mpfr_inits2 (prec, m, ms, mc, NULL);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_sin_cos (ms, mc, m, rnd);
          result_s = do_mpfr_ckconv (ms, type, inexact);
          result_c = do_mpfr_ckconv (mc, type, inexact);
          mpfr_clears (m, ms, mc, NULL);
          /* Only fold if BOTH results converted cleanly.  */
          if (result_s && result_c)
            {
              /* If we are to return in a complex value do so.  Note
                 the ordering: cosine is the real part, sine the
                 imaginary part.  */
              if (!arg_sinp && !arg_cosp)
                return build_complex (build_complex_type (type),
                                      result_c, result_s);

              /* Dereference the sin/cos pointer arguments.  */
              arg_sinp = build_fold_indirect_ref (arg_sinp);
              arg_cosp = build_fold_indirect_ref (arg_cosp);
              /* Proceed if valid pointer type were passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
                  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
                {
                  /* Set the values. */
                  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
                                          result_s);
                  TREE_SIDE_EFFECTS (result_s) = 1;
                  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
                                          result_c);
                  TREE_SIDE_EFFECTS (result_c) = 1;
                  /* Combine the assignments into a compound expr.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_s, result_c));
                }
            }
        }
    }
  return result;
}
13013
 
13014
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
                  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      /* N must fit in a host `long' since that is what FUNC takes;
         also require a finite argument above the optional MIN
         bound.  */
      if (n == (long)n
          && real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, n, m, rnd);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
13060
 
13061
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value. */
                  tree result_quo = fold_build2 (MODIFY_EXPR,
                                                 TREE_TYPE (arg_quo), arg_quo,
                                                 build_int_cst (NULL, integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
13133
 
13134
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer (lgamma poles).  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg. */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (NULL, sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
13198
 
13199
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     require a complex constant whose component type is a real type.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          /* Precision and rounding are taken from TYPE's component
             type, TREE_TYPE (type).  */
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref(m), re, rnd);
          mpfr_from_real (mpc_imagref(m), im, rnd);
          mpfr_clear_flags ();
          /* FUNC computes in place: M is both input and output.  */
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}
13243
 
13244
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.
     NOTE(review): only ARG0's format radix is checked; presumably
     ARG0 and ARG1 always share the same type at the call sites.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      /* Unless DO_NONFINITE, fold only when all four components are
         finite.  */
      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          /* Mirror TYPE's element format: same precision, truncating
             rounding if the target rounds towards zero.  */
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref(m0), re0, rnd);
          mpfr_from_real (mpc_imagref(m0), im0, rnd);
          mpfr_from_real (mpc_realref(m1), re1, rnd);
          mpfr_from_real (mpc_imagref(m1), im1, rnd);
          /* Reset the MPFR exception flags before calling FUNC,
             presumably so do_mpc_ckconv can inspect the flags FUNC
             raised -- confirm against do_mpc_ckconv.  */
          mpfr_clear_flags ();
          /* M0 doubles as input and output; FUNC's ternary value
             reports whether the result was exact.  */
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
13302
 
13303
/* FIXME tuples.
   The functions below provide an alternate interface for folding
   builtin function calls presented as GIMPLE_CALL statements rather
   than as CALL_EXPRs.  The folded result is still expressed as a
   tree.  There is too much code duplication in the handling of
   varargs functions, and a more intrusive re-factoring would permit
   better sharing of code between the tree and statement-based
   versions of these functions.  */
13311
 
13312
/* Construct a new CALL_EXPR using the tail of the argument list of STMT
13313
   along with N new arguments specified as the "..." parameters.  SKIP
13314
   is the number of arguments in STMT to be omitted.  This function is used
13315
   to do varargs-to-varargs transformations.  */
13316
 
13317
static tree
13318
gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13319
{
13320
  int oldnargs = gimple_call_num_args (stmt);
13321
  int nargs = oldnargs - skip + n;
13322
  tree fntype = TREE_TYPE (fndecl);
13323
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13324
  tree *buffer;
13325
  int i, j;
13326
  va_list ap;
13327
  location_t loc = gimple_location (stmt);
13328
 
13329
  buffer = XALLOCAVEC (tree, nargs);
13330
  va_start (ap, n);
13331
  for (i = 0; i < n; i++)
13332
    buffer[i] = va_arg (ap, tree);
13333
  va_end (ap);
13334
  for (j = skip; j < oldnargs; j++, i++)
13335
    buffer[i] = gimple_call_arg (stmt, j);
13336
 
13337
  return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13338
}
13339
 
13340
/* Fold a call STMT to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static tree
gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call:
     (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return NULL_TREE;
  dest = gimple_call_arg (stmt, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = gimple_call_arg (stmt, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = gimple_call_arg (stmt, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = gimple_call_arg (stmt, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The destination object size must be a known constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
        {
          if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
            len = build_int_cstu (size_type_node, strlen (fmt_str));
        }
      /* If the format is "%s" and first ... argument is a string literal,
         we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
               && strcmp (fmt_str, target_percent_s) == 0)
        {
          tree arg;

          if (nargs == 5)
            {
              arg = gimple_call_arg (stmt, 4);
              if (validate_arg (arg, POINTER_TYPE))
                {
                  len = c_strlen (arg, 1);
                  if (! len || ! host_integerp (len, 1))
                    len = NULL_TREE;
                }
            }
        }
    }

  /* A SIZE of all ones means the object size is unknown; otherwise
     the computed output length must fit strictly below SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
        return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
        return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
                      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments; keep dest, fmt and any varargs.  */
  return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
}
13430
 
13431
/* Fold a call STMT to {,v}snprintf.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

tree
gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
                                  enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     (dest, len, flag, size, fmt, ...).  */
  if (gimple_call_num_args (stmt) < 5)
    return NULL_TREE;
  dest = gimple_call_arg (stmt, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = gimple_call_arg (stmt, 1);
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = gimple_call_arg (stmt, 2);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = gimple_call_arg (stmt, 3);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = gimple_call_arg (stmt, 4);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The destination object size must be a known constant.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* A SIZE of all ones means the object size is unknown; otherwise
     verify the buffer is large enough for the requested length.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_chk function
             if SIZE is >= MAXLEN, never convert to __chk_fail ().  */
          if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
            return NULL_TREE;
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
        return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
                      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments; keep dest, len, fmt and varargs.  */
  return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
}
13507
 
13508
/* Builtins with folding operations that operate on "..." arguments
13509
   need special handling; we need to store the arguments in a convenient
13510
   data structure before attempting any folding.  Fortunately there are
13511
   only a few builtins that fall into this category.  FNDECL is the
13512
   function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13513
   result of the function call is ignored.  */
13514
 
13515
static tree
13516
gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13517
                             bool ignore ATTRIBUTE_UNUSED)
13518
{
13519
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13520
  tree ret = NULL_TREE;
13521
 
13522
  switch (fcode)
13523
    {
13524
    case BUILT_IN_SPRINTF_CHK:
13525
    case BUILT_IN_VSPRINTF_CHK:
13526
      ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13527
      break;
13528
 
13529
    case BUILT_IN_SNPRINTF_CHK:
13530
    case BUILT_IN_VSNPRINTF_CHK:
13531
      ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13532
 
13533
    default:
13534
      break;
13535
    }
13536
  if (ret)
13537
    {
13538
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13539
      TREE_NO_WARNING (ret) = 1;
13540
      return ret;
13541
    }
13542
  return NULL_TREE;
13543
}
13544
 
13545
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Only fold direct calls to builtins; calls that use
     __builtin_va_arg_pack () must be left alone.  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      /* FIXME: Don't use a list in this interface.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          /* Machine-specific builtin: build the TREE_LIST of arguments
             (iterating backwards so the cons chain ends up in call
             order) and let the target fold it.  */
          tree arglist = NULL_TREE;
          int i;
          for (i = nargs - 1; i >= 0; i--)
            arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
          return targetm.fold_builtin (fndecl, arglist, ignore);
        }
      else
        {
          /* Generic builtin: try the fixed-arity folder first...  */
          if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
            {
              tree args[MAX_ARGS_TO_FOLD_BUILTIN];
              int i;
              for (i = 0; i < nargs; i++)
                args[i] = gimple_call_arg (stmt, i);
              ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
            }
          /* ...then fall back to the varargs-builtin folder.  */
          if (!ret)
            ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  /* The folders may wrap the result in a NOP_EXPR;
                     attach the location to the underlying expression.  */
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
13607
 
13608
/* Look up the function in built_in_decls that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  /* Rename the implicit builtin declaration itself...  */
  builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
  set_user_assembler_name (builtin, asmspec);
  /* ...and also update the block-move/clear helpers and the libfunc
     table entries that emit the same routines as library calls.  */
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      /* Only register an "ffs" libfunc when int is narrower than a
         word, i.e. for the INT_TYPE_SIZE integer mode.  */
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.