;; Provenance: OpenCores Subversion repository "openrisc", trunk, rev 709,
;; path gnu-dev/or1k-gcc/gcc/config/i386/predicates.md
;; (https://opencores.org/ocsvn/openrisc/openrisc/trunk)
;; Predicate definitions for IA-32 and x86-64.
;; Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
;; Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

;; Return true if OP is either an i387 or SSE fp register.
(define_predicate "any_fp_register_operand"
  (and (match_code "reg")
       (match_test "ANY_FP_REGNO_P (REGNO (op))")))

;; Return true if OP is an i387 fp register.
(define_predicate "fp_register_operand"
  (and (match_code "reg")
       (match_test "FP_REGNO_P (REGNO (op))")))

;; Return true if OP is a non-fp register_operand.
(define_predicate "register_and_not_any_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "ANY_FP_REGNO_P (REGNO (op))"))))

;; Return true if OP is a register operand other than an i387 fp register.
(define_predicate "register_and_not_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "FP_REGNO_P (REGNO (op))"))))

;; True if the operand is an MMX register.
(define_predicate "mmx_reg_operand"
  (and (match_code "reg")
       (match_test "MMX_REGNO_P (REGNO (op))")))

;; True if the operand is an SSE register.
(define_predicate "sse_reg_operand"
  (and (match_code "reg")
       (match_test "SSE_REGNO_P (REGNO (op))")))

;; True if the operand is a Q_REGS class register.
(define_predicate "q_regs_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return ANY_QI_REG_P (op);
})

;; Match an SI or HImode register for a zero_extract.
(define_special_predicate "ext_register_operand"
  (match_operand 0 "register_operand")
{
  if ((!TARGET_64BIT || GET_MODE (op) != DImode)
      && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
    return false;
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* Be careful to accept only registers having upper parts.  */
  return (REG_P (op)
          && (REGNO (op) > LAST_VIRTUAL_REGISTER || REGNO (op) <= BX_REG));
})

;; Return true if op is the AX register.
(define_predicate "ax_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == AX_REG")))

;; Return true if op is the flags register.
(define_predicate "flags_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == FLAGS_REG")))

;; Return true if op is one of QImode registers: %[abcd][hl].
(define_predicate "QIreg_operand"
  (match_test "QI_REG_P (op)"))

;; Return true if op is a QImode register operand other than
;; %[abcd][hl].
(define_predicate "ext_QIreg_operand"
  (and (match_code "reg")
       (match_test "TARGET_64BIT")
       (match_test "REGNO (op) > BX_REG")))

;; Return true if op is not xmm0 register.
(define_predicate "reg_not_xmm0_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  return !REG_P (op) || REGNO (op) != FIRST_SSE_REG;
})

;; As above, but also allow memory operands.
(define_predicate "nonimm_not_xmm0_operand"
  (ior (match_operand 0 "memory_operand")
       (match_operand 0 "reg_not_xmm0_operand")))

;; Return true if op is not xmm0 register, but only for non-AVX targets.
(define_predicate "reg_not_xmm0_operand_maybe_avx"
  (if_then_else (match_test "TARGET_AVX")
    (match_operand 0 "register_operand")
    (match_operand 0 "reg_not_xmm0_operand")))

;; As above, but also allow memory operands.
(define_predicate "nonimm_not_xmm0_operand_maybe_avx"
  (if_then_else (match_test "TARGET_AVX")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "nonimm_not_xmm0_operand")))

;; Return true if VALUE can be stored in a sign extended immediate field.
(define_predicate "x86_64_immediate_operand"
  (match_code "const_int,symbol_ref,label_ref,const")
{
  if (!TARGET_64BIT)
    return immediate_operand (op, mode);

  switch (GET_CODE (op))
    {
    case CONST_INT:
      /* CONST_DOUBLEs never match, since HOST_BITS_PER_WIDE_INT is known
         to be at least 32 and this all acceptable constants are
         represented as CONST_INT.  */
      if (HOST_BITS_PER_WIDE_INT == 32)
        return true;
      else
        {
          HOST_WIDE_INT val = trunc_int_for_mode (INTVAL (op), DImode);
          return trunc_int_for_mode (val, SImode) == val;
        }
      break;

    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit.
         in CM_SMALL_PIC model we know it fits if it is local to the shared
         library.  Don't count TLS SYMBOL_REFs here, since they should fit
         only if inside of UNSPEC handled below.  */
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
        return false;
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
              || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
              || ix86_cmodel == CM_KERNEL);

    case CONST:
      /* We also may accept the offsetted memory references in certain
         special cases.  */
      if (GET_CODE (XEXP (op, 0)) == UNSPEC)
        switch (XINT (XEXP (op, 0), 1))
          {
          case UNSPEC_GOTPCREL:
          case UNSPEC_DTPOFF:
          case UNSPEC_GOTNTPOFF:
          case UNSPEC_NTPOFF:
            return true;
          default:
            break;
          }

      if (GET_CODE (XEXP (op, 0)) == PLUS)
        {
          rtx op1 = XEXP (XEXP (op, 0), 0);
          rtx op2 = XEXP (XEXP (op, 0), 1);
          HOST_WIDE_INT offset;

          if (ix86_cmodel == CM_LARGE)
            return false;
          if (!CONST_INT_P (op2))
            return false;
          offset = trunc_int_for_mode (INTVAL (op2), DImode);
          switch (GET_CODE (op1))
            {
            case SYMBOL_REF:
              /* TLS symbols are not constant.  */
              if (SYMBOL_REF_TLS_MODEL (op1))
                return false;
              /* For CM_SMALL assume that latest object is 16MB before
                 end of 31bits boundary.  We may also accept pretty
                 large negative constants knowing that all objects are
                 in the positive half of address space.  */
              if ((ix86_cmodel == CM_SMALL
                   || (ix86_cmodel == CM_MEDIUM
                       && !SYMBOL_REF_FAR_ADDR_P (op1)))
                  && offset < 16*1024*1024
                  && trunc_int_for_mode (offset, SImode) == offset)
                return true;
              /* For CM_KERNEL we know that all object resist in the
                 negative half of 32bits address space.  We may not
                 accept negative offsets, since they may be just off
                 and we may accept pretty large positive ones.  */
              if (ix86_cmodel == CM_KERNEL
                  && offset > 0
                  && trunc_int_for_mode (offset, SImode) == offset)
                return true;
              break;

            case LABEL_REF:
              /* These conditions are similar to SYMBOL_REF ones, just the
                 constraints for code models differ.  */
              if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
                  && offset < 16*1024*1024
                  && trunc_int_for_mode (offset, SImode) == offset)
                return true;
              if (ix86_cmodel == CM_KERNEL
                  && offset > 0
                  && trunc_int_for_mode (offset, SImode) == offset)
                return true;
              break;

            case UNSPEC:
              switch (XINT (op1, 1))
                {
                case UNSPEC_DTPOFF:
                case UNSPEC_NTPOFF:
                  if (offset > 0
                      && trunc_int_for_mode (offset, SImode) == offset)
                    return true;
                }
              break;

            default:
              break;
            }
        }
      break;

    default:
      gcc_unreachable ();
    }

  return false;
})

;; Return true if VALUE can be stored in the zero extended immediate field.
(define_predicate "x86_64_zext_immediate_operand"
  (match_code "const_double,const_int,symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      if (HOST_BITS_PER_WIDE_INT == 32)
        return (GET_MODE (op) == VOIDmode && !CONST_DOUBLE_HIGH (op));
      else
        return false;

    case CONST_INT:
      if (HOST_BITS_PER_WIDE_INT == 32)
        return INTVAL (op) >= 0;
      else
        return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);

    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit.  */
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
        return false;
      return (ix86_cmodel == CM_SMALL
              || (ix86_cmodel == CM_MEDIUM
                  && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;

    case CONST:
      /* We also may accept the offsetted memory references in certain
         special cases.  */
      if (GET_CODE (XEXP (op, 0)) == PLUS)
        {
          rtx op1 = XEXP (XEXP (op, 0), 0);
          rtx op2 = XEXP (XEXP (op, 0), 1);

          if (ix86_cmodel == CM_LARGE)
            return false;
          switch (GET_CODE (op1))
            {
            case SYMBOL_REF:
              /* TLS symbols are not constant.  */
              if (SYMBOL_REF_TLS_MODEL (op1))
                return false;
              /* For small code model we may accept pretty large positive
                 offsets, since one bit is available for free.  Negative
                 offsets are limited by the size of NULL pointer area
                 specified by the ABI.  */
              if ((ix86_cmodel == CM_SMALL
                   || (ix86_cmodel == CM_MEDIUM
                       && !SYMBOL_REF_FAR_ADDR_P (op1)))
                  && CONST_INT_P (op2)
                  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
                  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
                return true;
              /* ??? For the kernel, we may accept adjustment of
                 -0x10000000, since we know that it will just convert
                 negative address space to positive, but perhaps this
                 is not worthwhile.  */
              break;

            case LABEL_REF:
              /* These conditions are similar to SYMBOL_REF ones, just the
                 constraints for code models differ.  */
              if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
                  && CONST_INT_P (op2)
                  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
                  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
                return true;
              break;

            default:
              return false;
            }
        }
      break;

    default:
      gcc_unreachable ();
    }
  return false;
})

;; Return true if OP is general operand representable on x86_64.
(define_predicate "x86_64_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
         (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return true if OP is general operand representable on x86_64
;; as either sign extended or zero extended constant.
(define_predicate "x86_64_szext_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
         (match_operand 0 "x86_64_immediate_operand")
         (match_operand 0 "x86_64_zext_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return true if OP is nonmemory operand representable on x86_64.
(define_predicate "x86_64_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
         (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))

;; Return true if OP is nonmemory operand representable on x86_64.
(define_predicate "x86_64_szext_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
         (match_operand 0 "x86_64_immediate_operand")
         (match_operand 0 "x86_64_zext_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))

;; Return true when operand is PIC expression that can be computed by lea
;; operation.
(define_predicate "pic_32bit_operand"
  (match_code "const,symbol_ref,label_ref")
{
  if (!flag_pic)
    return false;

  /* Rule out relocations that translate into 64bit constants.  */
  if (TARGET_64BIT && GET_CODE (op) == CONST)
    {
      op = XEXP (op, 0);
      if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
        op = XEXP (op, 0);
      if (GET_CODE (op) == UNSPEC
          && (XINT (op, 1) == UNSPEC_GOTOFF
              || XINT (op, 1) == UNSPEC_GOT))
        return false;
    }

  return symbolic_operand (op, mode);
})

;; Return true if OP is nonmemory operand acceptable by movabs patterns.
(define_predicate "x86_64_movabs_operand"
  (and (match_operand 0 "nonmemory_operand")
       (not (match_operand 0 "pic_32bit_operand"))))

;; Return true if OP is either a symbol reference or a sum of a symbol
;; reference and a constant.
(define_predicate "symbolic_operand"
  (match_code "symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return true;

    case CONST:
      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
          || GET_CODE (op) == LABEL_REF
          || (GET_CODE (op) == UNSPEC
              && (XINT (op, 1) == UNSPEC_GOT
                  || XINT (op, 1) == UNSPEC_GOTOFF
                  || XINT (op, 1) == UNSPEC_PCREL
                  || XINT (op, 1) == UNSPEC_GOTPCREL)))
        return true;
      if (GET_CODE (op) != PLUS
          || !CONST_INT_P (XEXP (op, 1)))
        return false;

      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
          || GET_CODE (op) == LABEL_REF)
        return true;
      /* Only @GOTOFF gets offsets.  */
      if (GET_CODE (op) != UNSPEC
          || XINT (op, 1) != UNSPEC_GOTOFF)
        return false;

      op = XVECEXP (op, 0, 0);
      if (GET_CODE (op) == SYMBOL_REF
          || GET_CODE (op) == LABEL_REF)
        return true;
      return false;

    default:
      gcc_unreachable ();
    }
})

;; Return true if OP is a symbolic operand that resolves locally.
(define_predicate "local_symbolic_operand"
  (match_code "const,label_ref,symbol_ref")
{
  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
    op = XEXP (XEXP (op, 0), 0);

  if (GET_CODE (op) == LABEL_REF)
    return true;

  if (GET_CODE (op) != SYMBOL_REF)
    return false;

  if (SYMBOL_REF_TLS_MODEL (op))
    return false;

  if (SYMBOL_REF_LOCAL_P (op))
    return true;

  /* There is, however, a not insubstantial body of code in the rest of
     the compiler that assumes it can just stick the results of
     ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done.  */
  /* ??? This is a hack.  Should update the body of the compiler to
     always create a DECL an invoke targetm.encode_section_info.  */
  if (strncmp (XSTR (op, 0), internal_label_prefix,
               internal_label_prefix_len) == 0)
    return true;

  return false;
})

;; Test for a legitimate @GOTOFF operand.
;;
;; VxWorks does not impose a fixed gap between segments; the run-time
;; gap can be different from the object-file gap.  We therefore can't
;; use @GOTOFF unless we are absolutely sure that the symbol is in the
;; same segment as the GOT.  Unfortunately, the flexibility of linker
;; scripts means that we can't be sure of that in general, so assume
;; that @GOTOFF is never valid on VxWorks.
(define_predicate "gotoff_operand"
  (and (not (match_test "TARGET_VXWORKS_RTP"))
       (match_operand 0 "local_symbolic_operand")))

;; Test for various thread-local symbols.
(define_predicate "tls_symbolic_operand"
  (and (match_code "symbol_ref")
       (match_test "SYMBOL_REF_TLS_MODEL (op)")))

(define_predicate "tls_modbase_operand"
  (and (match_code "symbol_ref")
       (match_test "op == ix86_tls_module_base ()")))

;; Test for a pc-relative call operand
(define_predicate "constant_call_address_operand"
  (match_code "symbol_ref")
{
  if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC)
    return false;
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
    return false;
  return true;
})

;; P6 processors will jump to the address after the decrement when %esp
;; is used as a call operand, so they will execute return address as a code.
;; See Pentium Pro errata 70, Pentium 2 errata A33 and Pentium 3 errata E17.

(define_predicate "call_register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  if (!TARGET_64BIT && op == stack_pointer_rtx)
    return false;

  return register_no_elim_operand (op, mode);
})

;; True for any non-virtual or eliminable register.  Used in places where
;; instantiation of such a register may cause the pattern to not be recognized.
(define_predicate "register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return !(op == arg_pointer_rtx
           || op == frame_pointer_rtx
           || IN_RANGE (REGNO (op),
                        FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
})

;; Similarly, but include the stack pointer.  This is used to prevent esp
;; from being used as an index reg.
(define_predicate "index_register_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  if (reload_in_progress || reload_completed)
    return REG_OK_FOR_INDEX_STRICT_P (op);
  else
    return REG_OK_FOR_INDEX_NONSTRICT_P (op);
})

;; Return false if this is any eliminable register.  Otherwise general_operand.
(define_predicate "general_no_elim_operand"
  (if_then_else (match_code "reg,subreg")
    (match_operand 0 "register_no_elim_operand")
    (match_operand 0 "general_operand")))

;; Return false if this is any eliminable register.  Otherwise
;; register_operand or a constant.
(define_predicate "nonmemory_no_elim_operand"
  (ior (match_operand 0 "register_no_elim_operand")
       (match_operand 0 "immediate_operand")))

;; Test for a valid operand for indirect branch.
(define_predicate "indirect_branch_operand"
  (if_then_else (match_test "TARGET_X32")
    (match_operand 0 "register_operand")
    (match_operand 0 "nonimmediate_operand")))

;; Test for a valid operand for a call instruction.
(define_predicate "call_insn_operand"
  (ior (match_operand 0 "constant_call_address_operand")
       (match_operand 0 "call_register_no_elim_operand")
       (and (not (match_test "TARGET_X32"))
            (match_operand 0 "memory_operand"))))

;; Similarly, but for tail calls, in which we cannot allow memory references.
(define_predicate "sibcall_insn_operand"
  (ior (match_operand 0 "constant_call_address_operand")
       (match_operand 0 "register_no_elim_operand")))

;; Match exactly zero.
(define_predicate "const0_operand"
  (match_code "const_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST0_RTX (mode);
})

;; Match exactly one.
(define_predicate "const1_operand"
  (and (match_code "const_int")
       (match_test "op == const1_rtx")))

;; Match exactly eight.
(define_predicate "const8_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 8")))

;; Match exactly 128.
(define_predicate "const128_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 128")))

;; Match exactly 0x0FFFFFFFF in anddi as a zero-extension operation
(define_predicate "const_32bit_mask"
  (and (match_code "const_int")
       (match_test "trunc_int_for_mode (INTVAL (op), DImode)
                    == (HOST_WIDE_INT) 0xffffffff")))

;; Match 2, 4, or 8.  Used for leal multiplicands.
(define_predicate "const248_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 2 || i == 4 || i == 8;
})

;; Match 1, 2, 4, or 8
(define_predicate "const1248_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 1 || i == 2 || i == 4 || i == 8;
})

;; Match 3, 5, or 9.  Used for leal multiplicands.
(define_predicate "const359_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 3 || i == 5 || i == 9;
})

;; Match 0 or 1.
(define_predicate "const_0_to_1_operand"
  (and (match_code "const_int")
       (ior (match_test "op == const0_rtx")
            (match_test "op == const1_rtx"))))

;; Match 0 to 3.
(define_predicate "const_0_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 3)")))

;; Match 0 to 7.
(define_predicate "const_0_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 7)")))

;; Match 0 to 15.
(define_predicate "const_0_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 15)")))

;; Match 0 to 31.
(define_predicate "const_0_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 31)")))

;; Match 0 to 63.
(define_predicate "const_0_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 63)")))

;; Match 0 to 255.
(define_predicate "const_0_to_255_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 255)")))

;; Match (0 to 255) * 8
(define_predicate "const_0_to_255_mul_8_operand"
  (match_code "const_int")
{
  unsigned HOST_WIDE_INT val = INTVAL (op);
  return val <= 255*8 && val % 8 == 0;
})

;; Return true if OP is CONST_INT >= 1 and <= 31 (a valid operand
;; for shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 31)")))

;; Return true if OP is CONST_INT >= 1 and <= 63 (a valid operand
;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 63)")))

;; Match 2 or 3.
(define_predicate "const_2_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 2, 3)")))

;; Match 4 to 5.
(define_predicate "const_4_to_5_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 5)")))

;; Match 4 to 7.
(define_predicate "const_4_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 7)")))

;; Match 6 to 7.
(define_predicate "const_6_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 6, 7)")))

;; Match 8 to 11.
(define_predicate "const_8_to_11_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 11)")))

;; Match 12 to 15.
(define_predicate "const_12_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 12, 15)")))

;; True if this is a constant appropriate for an increment or decrement.
(define_predicate "incdec_operand"
  (match_code "const_int")
{
  /* On Pentium4, the inc and dec operations causes extra dependency on flag
     registers, since carry flag is not set.  */
  if (!TARGET_USE_INCDEC && !optimize_insn_for_size_p ())
    return false;
  return op == const1_rtx || op == constm1_rtx;
})

;; True for registers, or 1 or -1.  Used to optimize double-word shifts.
(define_predicate "reg_or_pm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
            (ior (match_test "op == const1_rtx")
                 (match_test "op == constm1_rtx")))))

;; True if OP is acceptable as operand of DImode shift expander.
(define_predicate "shiftdi_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

(define_predicate "ashldi_input_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "reg_or_pm1_operand")))

;; Return true if OP is a vector load from the constant pool with just
;; the first element nonzero.
(define_predicate "zero_extended_scalar_load_operand"
  (match_code "mem")
{
  unsigned n_elts;
  op = maybe_get_pool_constant (op);

  if (!(op && GET_CODE (op) == CONST_VECTOR))
    return false;

  n_elts = CONST_VECTOR_NUNITS (op);

  for (n_elts--; n_elts > 0; n_elts--)
    {
      rtx elt = CONST_VECTOR_ELT (op, n_elts);
      if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
        return false;
    }
  return true;
})

/* Return true if operand is a vector constant that is all ones. */
(define_predicate "vector_all_ones_operand"
  (match_code "const_vector")
{
  int nunits = GET_MODE_NUNITS (mode);

  if (GET_CODE (op) == CONST_VECTOR
      && CONST_VECTOR_NUNITS (op) == nunits)
    {
      int i;
      for (i = 0; i < nunits; ++i)
        {
          rtx x = CONST_VECTOR_ELT (op, i);
          if (x != constm1_rtx)
            return false;
        }
      return true;
    }

  return false;
})

;; Return true when OP is operand acceptable for standard SSE move.
(define_predicate "vector_move_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "const0_operand")))

;; Return true when OP is nonimmediate or standard SSE constant.
(define_predicate "nonimmediate_or_sse_const_operand"
  (match_operand 0 "general_operand")
{
  if (nonimmediate_operand (op, mode))
    return true;
  if (standard_sse_constant_p (op) > 0)
    return true;
  return false;
})

;; Return true if OP is a register or a zero.
(define_predicate "reg_or_0_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "const0_operand")))

;; Return true if op is a valid address for LEA, and does not contain
;; a segment override.  Defined as a special predicate to allow
;; mode-less const_int operands pass to address_operand.
(define_special_predicate "lea_address_operand"
  (match_operand 0 "address_operand")
{
  struct ix86_address parts;
  int ok;

  /*  LEA handles zero-extend by itself.  */
  if (GET_CODE (op) == ZERO_EXTEND
      || GET_CODE (op) == AND)
    return false;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  return parts.seg == SEG_DEFAULT;
})

;; Return true if op is a valid base register, displacement or
;; sum of base register and displacement for VSIB addressing.
(define_predicate "vsib_address_operand"
  (match_operand 0 "address_operand")
{
  struct ix86_address parts;
  int ok;
  rtx disp;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  if (parts.index || parts.seg != SEG_DEFAULT)
    return false;

  /* VSIB addressing doesn't support (%rip).  */
  if (parts.disp && GET_CODE (parts.disp) == CONST)
    {
      disp = XEXP (parts.disp, 0);
      if (GET_CODE (disp) == PLUS)
        disp = XEXP (disp, 0);
      if (GET_CODE (disp) == UNSPEC)
        switch (XINT (disp, 1))
          {
          case UNSPEC_GOTPCREL:
          case UNSPEC_PCREL:
          case UNSPEC_GOTNTPOFF:
            return false;
          }
    }

  return true;
})

(define_predicate "vsib_mem_operator"
  (match_code "mem"))

;; Return true if the rtx is known to be at least 32 bits aligned.
(define_predicate "aligned_operand"
  (match_operand 0 "general_operand")
{
  struct ix86_address parts;
  int ok;

  /* Registers and immediate operands are always "aligned".  */
  if (!MEM_P (op))
    return true;

  /* All patterns using aligned_operand on memory operands ends up
     in promoting memory operand to 64bit and thus causing memory mismatch.  */
  if (TARGET_MEMORY_MISMATCH_STALL && !optimize_insn_for_size_p ())
    return false;

  /* Don't even try to do any aligned optimizations with volatiles.  */
  if (MEM_VOLATILE_P (op))
    return false;

  /* MEM_ALIGN is in bits; 32-bit alignment is already recorded.  */
  if (MEM_ALIGN (op) >= 32)
    return true;

  op = XEXP (op, 0);

  /* Pushes and pops are only valid on the stack pointer.  */
  if (GET_CODE (op) == PRE_DEC
      || GET_CODE (op) == POST_INC)
    return true;

  /* Decode the address.  */
  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  if (parts.base && GET_CODE (parts.base) == SUBREG)
    parts.base = SUBREG_REG (parts.base);
  if (parts.index && GET_CODE (parts.index) == SUBREG)
    parts.index = SUBREG_REG (parts.index);

  /* Look for some component that isn't known to be aligned.  */
  if (parts.index)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
        return false;
    }
  if (parts.base)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
        return false;
    }
  if (parts.disp)
    {
      /* Displacement must be a constant multiple of 4 bytes.  */
      if (!CONST_INT_P (parts.disp)
          || (INTVAL (parts.disp) & 3))
        return false;
    }

  /* Didn't find one -- this must be an aligned address.  */
  return true;
})
926
 
927
;; Return true if OP is memory operand with a displacement.
(define_predicate "memory_displacement_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);
  return parts.disp != NULL_RTX;
})
938
 
939
;; Return true if OP is memory operand with a displacement only
;; (i.e. no base and no index register).  Never true for 64-bit code.
(define_predicate "memory_displacement_only_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  if (TARGET_64BIT)
    return false;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);

  if (parts.base || parts.index)
    return false;

  return parts.disp != NULL_RTX;
})
957
 
958
;; Return true if OP is memory operand which will need zero or
;; one register at most, not counting stack pointer or frame pointer.
(define_predicate "cmpxchg8b_pic_memory_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);

  if (parts.base && GET_CODE (parts.base) == SUBREG)
    parts.base = SUBREG_REG (parts.base);
  if (parts.index && GET_CODE (parts.index) == SUBREG)
    parts.index = SUBREG_REG (parts.index);

  /* If either the base or the index is absent, or is one of the
     "free" pointers (stack/frame/arg pointer), the address consumes
     at most one general register.  */
  if (parts.base == NULL_RTX
      || parts.base == arg_pointer_rtx
      || parts.base == frame_pointer_rtx
      || parts.base == hard_frame_pointer_rtx
      || parts.base == stack_pointer_rtx)
    return true;

  if (parts.index == NULL_RTX
      || parts.index == arg_pointer_rtx
      || parts.index == frame_pointer_rtx
      || parts.index == hard_frame_pointer_rtx
      || parts.index == stack_pointer_rtx)
    return true;

  return false;
})
990
 
991
 
992
;; Return true if OP is memory operand that cannot be represented
;; by the modRM array.
(define_predicate "long_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "memory_address_length (op)")))
997
 
998
;; Return true if OP is a comparison operator that can be issued by fcmov.
(define_predicate "fcmov_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  /* FP comparisons are first mapped to the equivalent integer
     condition code tested after fcom/fcomi.  */
  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
        return false;
      code = ix86_fp_compare_code_to_integer (code);
    }
  /* i387 supports just limited amount of conditional codes.  */
  switch (code)
    {
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode
          || inmode == CCCmode)
        return true;
      return false;
    case ORDERED: case UNORDERED:
    case EQ: case NE:
      return true;
    default:
      return false;
    }
})
1026
 
1027
;; Return true if OP is a comparison that can be used in the CMPSS/CMPPS insns.
;; The first set are supported directly; the second set can't be done with
;; full IEEE support, i.e. NaNs.

(define_predicate "sse_comparison_operator"
  (ior (match_code "eq,ne,lt,le,unordered,unge,ungt,ordered")
       (and (match_test "TARGET_AVX")
            (match_code "ge,gt,uneq,unle,unlt,ltgt"))))
1035
 
1036
;; Match equality or a signed integer comparison.
(define_predicate "ix86_comparison_int_operator"
  (match_code "ne,eq,ge,gt,le,lt"))

;; Match equality or an unsigned integer comparison.
(define_predicate "ix86_comparison_uns_operator"
  (match_code "ne,eq,geu,gtu,leu,ltu"))

;; Match the comparisons usable after a bit-test (bt) instruction.
(define_predicate "bt_comparison_operator"
  (match_code "ne,eq"))
1044
 
1045
;; Return true if OP is a valid comparison operator in valid mode.
(define_predicate "ix86_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    return ix86_trivial_fp_comparison_operator (op, mode);

  /* Each condition code is only meaningful in certain CC modes.  */
  switch (code)
    {
    case EQ: case NE:
      return true;
    case LT: case GE:
      if (inmode == CCmode || inmode == CCGCmode
          || inmode == CCGOCmode || inmode == CCNOmode)
        return true;
      return false;
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCCmode)
        return true;
      return false;
    case ORDERED: case UNORDERED:
      if (inmode == CCmode)
        return true;
      return false;
    case GT: case LE:
      if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
        return true;
      return false;
    default:
      return false;
    }
})
1080
 
1081
;; Return true if OP is a valid comparison operator
;; testing carry flag to be set.
(define_predicate "ix86_carry_flag_operator"
  (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  /* Map FP comparisons to the integer condition tested afterwards.  */
  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
        return false;
      code = ix86_fp_compare_code_to_integer (code);
    }
  else if (inmode == CCCmode)
   return code == LTU || code == GTU;
  else if (inmode != CCmode)
    return false;

  /* In CCmode only LTU maps directly to the carry flag.  */
  return code == LTU;
})
1102
 
1103
;; Return true if this comparison only requires testing one flag bit.
(define_predicate "ix86_trivial_fp_comparison_operator"
  (match_code "gt,ge,unlt,unle,uneq,ltgt,ordered,unordered"))

;; Return true if we know how to do this comparison.  Others require
;; testing more than one flag bit, and we let the generic middle-end
;; code do that.
(define_predicate "ix86_fp_comparison_operator"
  (if_then_else (match_test "ix86_fp_comparison_strategy (GET_CODE (op))
                             == IX86_FPCMP_ARITH")
               (match_operand 0 "comparison_operator")
               (match_operand 0 "ix86_trivial_fp_comparison_operator")))
1115
 
1116
;; Same as above, but for swapped comparison used in fp_jcc_4_387.
(define_predicate "ix86_swapped_fp_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum rtx_code code = GET_CODE (op);
  bool ret;

  /* Temporarily replace the code with its swapped form, query the
     unswapped predicate, then restore the original code.  */
  PUT_CODE (op, swap_condition (code));
  ret = ix86_fp_comparison_operator (op, mode);
  PUT_CODE (op, code);
  return ret;
})
1128
 
1129
;; Nearly general operand, but accept any const_double, since we wish
;; to be able to drop them into memory rather than have them get pulled
;; into registers.
(define_predicate "cmp_fp_expander_operand"
  (ior (match_code "const_double")
       (match_operand 0 "general_operand")))

;; Return true if this is a valid binary floating-point operation.
(define_predicate "binary_fp_operator"
  (match_code "plus,minus,mult,div"))

;; Return true if this is a multiply operation.
(define_predicate "mult_operator"
  (match_code "mult"))

;; Return true if this is a division operation.
(define_predicate "div_operator"
  (match_code "div"))

;; Return true if this is a plus, minus, and, ior or xor operation.
(define_predicate "plusminuslogic_operator"
  (match_code "plus,minus,and,ior,xor"))

;; Return true if this is a float extend operation.
(define_predicate "float_operator"
  (match_code "float"))
1155
 
1156
;; Return true for ARITHMETIC_P.
(define_predicate "arith_or_logical_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
               mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))

;; Return true for COMMUTATIVE_P.
(define_predicate "commutative_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax"))

;; Return true if OP is a binary operator that can be promoted to wider mode.
;; Multiply is only included when the tuning says HImode imul should be
;; promoted.
(define_predicate "promotable_binary_operator"
  (ior (match_code "plus,minus,and,ior,xor,ashift")
       (and (match_code "mult")
            (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))
1170
 
1171
;; Match a COMPARE rtx.
(define_predicate "compare_operator"
  (match_code "compare"))

;; Match an absolute-value or negation operation.
(define_predicate "absneg_operator"
  (match_code "abs,neg"))

;; Return true if OP is misaligned memory operand
(define_predicate "misaligned_operand"
  (and (match_code "mem")
       (match_test "MEM_ALIGN (op) < GET_MODE_ALIGNMENT (mode)")))
1181
 
1182
;; Return true if OP is a emms operation, known to be a PARALLEL.
;; The PARALLEL must have 17 elements; elements 1..8 clobber the x87
;; stack registers (XFmode) and elements 9..16 clobber the MMX
;; registers (DImode).
(define_predicate "emms_operation"
  (match_code "parallel")
{
  unsigned i;

  if (XVECLEN (op, 0) != 17)
    return false;

  for (i = 0; i < 8; i++)
    {
      rtx elt = XVECEXP (op, 0, i+1);

      if (GET_CODE (elt) != CLOBBER
          || GET_CODE (SET_DEST (elt)) != REG
          || GET_MODE (SET_DEST (elt)) != XFmode
          || REGNO (SET_DEST (elt)) != FIRST_STACK_REG + i)
        return false;

      elt = XVECEXP (op, 0, i+9);

      if (GET_CODE (elt) != CLOBBER
          || GET_CODE (SET_DEST (elt)) != REG
          || GET_MODE (SET_DEST (elt)) != DImode
          || REGNO (SET_DEST (elt)) != FIRST_MMX_REG + i)
        return false;
    }
  return true;
})
1211
 
1212
;; Return true if OP is a vzeroall operation, known to be a PARALLEL.
;; Elements 1..nregs must each set an SSE register to V8SImode zero,
;; where nregs is 16 in 64-bit mode and 8 otherwise.
(define_predicate "vzeroall_operation"
  (match_code "parallel")
{
  unsigned i, nregs = TARGET_64BIT ? 16 : 8;

  if ((unsigned) XVECLEN (op, 0) != 1 + nregs)
    return false;

  for (i = 0; i < nregs; i++)
    {
      rtx elt = XVECEXP (op, 0, i+1);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_DEST (elt)) != REG
          || GET_MODE (SET_DEST (elt)) != V8SImode
          || REGNO (SET_DEST (elt)) != SSE_REGNO (i)
          || SET_SRC (elt) != CONST0_RTX (V8SImode))
        return false;
    }
  return true;
})
1234
 
1235
;; Return true if OP is a parallel for a vbroadcast permute.
;; That is, a PARALLEL of const_ints that are all identical.

(define_predicate "avx_vbroadcast_operand"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  rtx elt = XVECEXP (op, 0, 0);
  int i, nelt = XVECLEN (op, 0);

  /* Don't bother checking there are the right number of operands,
     merely that they're all identical.  */
  for (i = 1; i < nelt; ++i)
    if (XVECEXP (op, 0, i) != elt)
      return false;
  return true;
})
1251
 
1252
;; Return true if OP is a proper third operand to vpblendw256.
;; The high byte of the immediate must duplicate the low byte, since
;; the same 8-bit mask is applied to both 128-bit lanes.
(define_predicate "avx2_pblendw_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT val = INTVAL (op);
  HOST_WIDE_INT low = val & 0xff;
  return val == ((low << 8) | low);
})

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.