OpenCores
URL https://opencores.org/ocsvn/scarts/scarts/trunk

Subversion Repositories scarts

scarts/trunk/toolchain/scarts-gcc/gcc-4.1.1/gcc/config/i386/predicates.md (rev 12)

;; Predicate definitions for IA-32 and x86-64.
;; Copyright (C) 2004, 2005 Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 2, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING.  If not, write to
;; the Free Software Foundation, 51 Franklin Street, Fifth Floor,
;; Boston, MA 02110-1301, USA.

;; Return nonzero if OP is either an i387 or SSE fp register.
(define_predicate "any_fp_register_operand"
  (and (match_code "reg")
       (match_test "ANY_FP_REGNO_P (REGNO (op))")))

;; Return nonzero if OP is an i387 fp register.
(define_predicate "fp_register_operand"
  (and (match_code "reg")
       (match_test "FP_REGNO_P (REGNO (op))")))

;; Return nonzero if OP is a non-fp register_operand.
(define_predicate "register_and_not_any_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "ANY_FP_REGNO_P (REGNO (op))"))))

;; Return nonzero if OP is a register operand other than an i387 fp register.
(define_predicate "register_and_not_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "FP_REGNO_P (REGNO (op))"))))

;; True if the operand is an MMX register.
(define_predicate "mmx_reg_operand"
  (and (match_code "reg")
       (match_test "MMX_REGNO_P (REGNO (op))")))

;; True if the operand is a Q_REGS class register.
(define_predicate "q_regs_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return ANY_QI_REG_P (op);
})

;; Return true if op is a NON_Q_REGS class register.
(define_predicate "non_q_regs_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return NON_QI_REG_P (op);
})

;; Match an SImode or HImode register for a zero_extract.
(define_special_predicate "ext_register_operand"
  (match_operand 0 "register_operand")
{
  if ((!TARGET_64BIT || GET_MODE (op) != DImode)
      && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
    return 0;
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* Be careful to accept only registers having upper parts.  */
  return REGNO (op) > LAST_VIRTUAL_REGISTER || REGNO (op) < 4;
})
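;; For illustration only: a sketch of the kind of zero_extract this predicate
;; is typically paired with elsewhere in the machine description, assuming
;; the usual high-byte register access (%ah and friends); the exact operand
;; numbers and constraints are placeholders, not taken from this file:
;;
;;   (zero_extract:SI (match_operand 0 "ext_register_operand" "Q")
;;                    (const_int 8)
;;                    (const_int 8))
;;
;; Only the first four integer registers have such an 8-bit upper part,
;; hence the REGNO (op) < 4 check above.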

;; Return true if op is the flags register.
(define_predicate "flags_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == FLAGS_REG")))

;; Return 1 if VALUE can be stored in a sign-extended immediate field.
(define_predicate "x86_64_immediate_operand"
  (match_code "const_int,symbol_ref,label_ref,const")
{
  if (!TARGET_64BIT)
    return immediate_operand (op, mode);

  switch (GET_CODE (op))
    {
    case CONST_INT:
      /* CONST_DOUBLEs never match, since HOST_BITS_PER_WIDE_INT is known
         to be at least 32, and thus all acceptable constants are
         represented as CONST_INT.  */
      if (HOST_BITS_PER_WIDE_INT == 32)
        return 1;
      else
        {
          HOST_WIDE_INT val = trunc_int_for_mode (INTVAL (op), DImode);
          return trunc_int_for_mode (val, SImode) == val;
        }
      break;

    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit.
         In the CM_SMALL_PIC model we know it fits if it is local to the
         shared library.  Don't count TLS SYMBOL_REFs here, since they
         should fit only if inside of UNSPEC handled below.  */
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
        return false;
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
              || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
              || ix86_cmodel == CM_KERNEL);

    case CONST:
      /* We also may accept the offsetted memory references in certain
         special cases.  */
      if (GET_CODE (XEXP (op, 0)) == UNSPEC)
        switch (XINT (XEXP (op, 0), 1))
          {
          case UNSPEC_GOTPCREL:
          case UNSPEC_DTPOFF:
          case UNSPEC_GOTNTPOFF:
          case UNSPEC_NTPOFF:
            return 1;
          default:
            break;
          }

      if (GET_CODE (XEXP (op, 0)) == PLUS)
        {
          rtx op1 = XEXP (XEXP (op, 0), 0);
          rtx op2 = XEXP (XEXP (op, 0), 1);
          HOST_WIDE_INT offset;

          if (ix86_cmodel == CM_LARGE)
            return 0;
          if (GET_CODE (op2) != CONST_INT)
            return 0;
          offset = trunc_int_for_mode (INTVAL (op2), DImode);
          switch (GET_CODE (op1))
            {
            case SYMBOL_REF:
              /* TLS symbols are not constant.  */
              if (SYMBOL_REF_TLS_MODEL (op1))
                return 0;
              /* For CM_SMALL assume that the latest object is 16MB before
                 the end of the 31-bit boundary.  We may also accept pretty
                 large negative constants knowing that all objects are
                 in the positive half of the address space.  */
              if ((ix86_cmodel == CM_SMALL
                   || (ix86_cmodel == CM_MEDIUM
                       && !SYMBOL_REF_FAR_ADDR_P (op1)))
                  && offset < 16*1024*1024
                  && trunc_int_for_mode (offset, SImode) == offset)
                return 1;
              /* For CM_KERNEL we know that all objects reside in the
                 negative half of the 32-bit address space.  We may not
                 accept negative offsets, since they may be just out of
                 range, but we may accept pretty large positive ones.  */
              if (ix86_cmodel == CM_KERNEL
                  && offset > 0
                  && trunc_int_for_mode (offset, SImode) == offset)
                return 1;
              break;

            case LABEL_REF:
              /* These conditions are similar to the SYMBOL_REF ones, just
                 the constraints for code models differ.  */
              if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
                  && offset < 16*1024*1024
                  && trunc_int_for_mode (offset, SImode) == offset)
                return 1;
              if (ix86_cmodel == CM_KERNEL
                  && offset > 0
                  && trunc_int_for_mode (offset, SImode) == offset)
                return 1;
              break;

            case UNSPEC:
              switch (XINT (op1, 1))
                {
                case UNSPEC_DTPOFF:
                case UNSPEC_NTPOFF:
                  if (offset > 0
                      && trunc_int_for_mode (offset, SImode) == offset)
                    return 1;
                }
              break;

            default:
              break;
            }
        }
      break;

    default:
      gcc_unreachable ();
    }

  return 0;
})
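;; For illustration, two concrete cases implied by the CONST_INT check above
;; (assuming TARGET_64BIT and a 64-bit HOST_WIDE_INT):
;;
;;   (const_int -123)          ;; accepted: survives a round trip through
;;                             ;; SImode sign extension
;;   (const_int 0x123456789)   ;; rejected: does not fit a sign-extended
;;                             ;; 32-bit immediate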

;; Return 1 if VALUE can be stored in a zero-extended immediate field.
(define_predicate "x86_64_zext_immediate_operand"
  (match_code "const_double,const_int,symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      if (HOST_BITS_PER_WIDE_INT == 32)
        return (GET_MODE (op) == VOIDmode && !CONST_DOUBLE_HIGH (op));
      else
        return 0;

    case CONST_INT:
      if (HOST_BITS_PER_WIDE_INT == 32)
        return INTVAL (op) >= 0;
      else
        return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);

    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit.  */
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
        return false;
      return (ix86_cmodel == CM_SMALL
              || (ix86_cmodel == CM_MEDIUM
                  && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;

    case CONST:
      /* We also may accept the offsetted memory references in certain
         special cases.  */
      if (GET_CODE (XEXP (op, 0)) == PLUS)
        {
          rtx op1 = XEXP (XEXP (op, 0), 0);
          rtx op2 = XEXP (XEXP (op, 0), 1);

          if (ix86_cmodel == CM_LARGE)
            return 0;
          switch (GET_CODE (op1))
            {
            case SYMBOL_REF:
              /* TLS symbols are not constant.  */
              if (SYMBOL_REF_TLS_MODEL (op1))
                return 0;
              /* For the small code model we may accept pretty large positive
                 offsets, since one bit is available for free.  Negative
                 offsets are limited by the size of the NULL pointer area
                 specified by the ABI.  */
              if ((ix86_cmodel == CM_SMALL
                   || (ix86_cmodel == CM_MEDIUM
                       && !SYMBOL_REF_FAR_ADDR_P (op1)))
                  && GET_CODE (op2) == CONST_INT
                  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
                  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
                return 1;
              /* ??? For the kernel, we may accept adjustment of
                 -0x10000000, since we know that it will just convert
                 negative address space to positive, but perhaps this
                 is not worthwhile.  */
              break;

            case LABEL_REF:
              /* These conditions are similar to the SYMBOL_REF ones, just
                 the constraints for code models differ.  */
              if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
                  && GET_CODE (op2) == CONST_INT
                  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
                  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
                return 1;
              break;

            default:
              return 0;
            }
        }
      break;

    default:
      gcc_unreachable ();
    }
  return 0;
})
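;; For illustration, a case that contrasts with x86_64_immediate_operand
;; above (assuming a 64-bit HOST_WIDE_INT):
;;
;;   (const_int 0xffffffff)   ;; accepted here, since it fits a zero-extended
;;                            ;; 32-bit field, but rejected by
;;                            ;; x86_64_immediate_operand, since it does not
;;                            ;; sign-extend from SImode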

;; Return nonzero if OP is a general operand representable on x86_64.
(define_predicate "x86_64_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
         (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return nonzero if OP is a general operand representable on x86_64
;; as either a sign-extended or zero-extended constant.
(define_predicate "x86_64_szext_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
         (ior (match_operand 0 "x86_64_immediate_operand")
              (match_operand 0 "x86_64_zext_immediate_operand")))
    (match_operand 0 "general_operand")))

;; Return nonzero if OP is a nonmemory operand representable on x86_64.
(define_predicate "x86_64_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
         (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))

;; Return nonzero if OP is a nonmemory operand representable on x86_64
;; as either a sign-extended or zero-extended constant.
(define_predicate "x86_64_szext_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
         (ior (match_operand 0 "x86_64_immediate_operand")
              (match_operand 0 "x86_64_zext_immediate_operand")))
    (match_operand 0 "nonmemory_operand")))

;; Return true when the operand is a PIC expression that can be computed
;; by the lea operation.
(define_predicate "pic_32bit_operand"
  (match_code "const,symbol_ref,label_ref")
{
  if (!flag_pic)
    return 0;
  /* Rule out relocations that translate into 64-bit constants.  */
  if (TARGET_64BIT && GET_CODE (op) == CONST)
    {
      op = XEXP (op, 0);
      if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
        op = XEXP (op, 0);
      if (GET_CODE (op) == UNSPEC
          && (XINT (op, 1) == UNSPEC_GOTOFF
              || XINT (op, 1) == UNSPEC_GOT))
        return 0;
    }
  return symbolic_operand (op, mode);
})
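;; For illustration, one shape this accepts in 32-bit PIC code (a sketch;
;; the exact mode annotations depend on the surrounding pattern):
;;
;;   (const:SI (unspec:SI [(symbol_ref:SI "sym")] UNSPEC_GOTOFF))
;;
;; which lea can add to the PIC register without needing a 64-bit relocation.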


;; Return nonzero if OP is a nonmemory operand acceptable by the movabs
;; patterns.
(define_predicate "x86_64_movabs_operand"
  (if_then_else (match_test "!TARGET_64BIT || !flag_pic")
    (match_operand 0 "nonmemory_operand")
    (ior (match_operand 0 "register_operand")
         (and (match_operand 0 "const_double_operand")
              (match_test "GET_MODE_SIZE (mode) <= 8")))))

;; Returns nonzero if OP is either a symbol reference or a sum of a symbol
;; reference and a constant.
(define_predicate "symbolic_operand"
  (match_code "symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;

    case CONST:
      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
          || GET_CODE (op) == LABEL_REF
          || (GET_CODE (op) == UNSPEC
              && (XINT (op, 1) == UNSPEC_GOT
                  || XINT (op, 1) == UNSPEC_GOTOFF
                  || XINT (op, 1) == UNSPEC_GOTPCREL)))
        return 1;
      if (GET_CODE (op) != PLUS
          || GET_CODE (XEXP (op, 1)) != CONST_INT)
        return 0;

      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
          || GET_CODE (op) == LABEL_REF)
        return 1;
      /* Only @GOTOFF gets offsets.  */
      if (GET_CODE (op) != UNSPEC
          || XINT (op, 1) != UNSPEC_GOTOFF)
        return 0;

      op = XVECEXP (op, 0, 0);
      if (GET_CODE (op) == SYMBOL_REF
          || GET_CODE (op) == LABEL_REF)
        return 1;
      return 0;

    default:
      gcc_unreachable ();
    }
})
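;; For illustration, two forms accepted by symbolic_operand (sketches):
;;
;;   (symbol_ref:SI "foo")
;;   (const:SI (plus:SI (symbol_ref:SI "foo") (const_int 4)))
;;
;; i.e. a bare symbol, or a symbol plus a constant offset wrapped in CONST.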

;; Return true if the operand contains a @GOT or @GOTOFF reference.
(define_predicate "pic_symbolic_operand"
  (match_code "const")
{
  op = XEXP (op, 0);
  if (TARGET_64BIT)
    {
      if (GET_CODE (op) == UNSPEC
          && XINT (op, 1) == UNSPEC_GOTPCREL)
        return 1;
      if (GET_CODE (op) == PLUS
          && GET_CODE (XEXP (op, 0)) == UNSPEC
          && XINT (XEXP (op, 0), 1) == UNSPEC_GOTPCREL)
        return 1;
    }
  else
    {
      if (GET_CODE (op) == UNSPEC)
        return 1;
      if (GET_CODE (op) != PLUS
          || GET_CODE (XEXP (op, 1)) != CONST_INT)
        return 0;
      op = XEXP (op, 0);
      if (GET_CODE (op) == UNSPEC)
        return 1;
    }
  return 0;
})

;; Return true if OP is a symbolic operand that resolves locally.
(define_predicate "local_symbolic_operand"
  (match_code "const,label_ref,symbol_ref")
{
  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
    op = XEXP (XEXP (op, 0), 0);

  if (GET_CODE (op) == LABEL_REF)
    return 1;

  if (GET_CODE (op) != SYMBOL_REF)
    return 0;

  if (SYMBOL_REF_LOCAL_P (op))
    return 1;

  /* There is, however, a not insubstantial body of code in the rest of
     the compiler that assumes it can just stick the results of
     ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done.  */
  /* ??? This is a hack.  Should update the body of the compiler to
     always create a DECL and invoke targetm.encode_section_info.  */
  if (strncmp (XSTR (op, 0), internal_label_prefix,
               internal_label_prefix_len) == 0)
    return 1;

  return 0;
})

;; Test for various thread-local symbols.
(define_predicate "tls_symbolic_operand"
  (and (match_code "symbol_ref")
       (match_test "SYMBOL_REF_TLS_MODEL (op) != 0")))

;; Test for a pc-relative call operand.
(define_predicate "constant_call_address_operand"
  (ior (match_code "symbol_ref")
       (match_operand 0 "local_symbolic_operand")))

;; True for any register that is neither virtual nor eliminable.  Used in
;; places where instantiation of such a register may cause the pattern to
;; not be recognized.
(define_predicate "register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return !(op == arg_pointer_rtx
           || op == frame_pointer_rtx
           || (REGNO (op) >= FIRST_PSEUDO_REGISTER
               && REGNO (op) <= LAST_VIRTUAL_REGISTER));
})

;; Similarly, but also disallow the stack pointer.  This is used to prevent
;; esp from being used as an index reg.
(define_predicate "index_register_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  if (reload_in_progress || reload_completed)
    return REG_OK_FOR_INDEX_STRICT_P (op);
  else
    return REG_OK_FOR_INDEX_NONSTRICT_P (op);
})

;; Return false if this is any eliminable register.  Otherwise general_operand.
(define_predicate "general_no_elim_operand"
  (if_then_else (match_code "reg,subreg")
    (match_operand 0 "register_no_elim_operand")
    (match_operand 0 "general_operand")))

;; Return false if this is any eliminable register.  Otherwise
;; register_operand or a constant.
(define_predicate "nonmemory_no_elim_operand"
  (ior (match_operand 0 "register_no_elim_operand")
       (match_operand 0 "immediate_operand")))

;; Test for a valid operand for a call instruction.
(define_predicate "call_insn_operand"
  (ior (match_operand 0 "constant_call_address_operand")
       (ior (match_operand 0 "register_no_elim_operand")
            (match_operand 0 "memory_operand"))))

;; Similarly, but for tail calls, in which we cannot allow memory references.
(define_predicate "sibcall_insn_operand"
  (ior (match_operand 0 "constant_call_address_operand")
       (match_operand 0 "register_no_elim_operand")))

;; Match exactly zero.
(define_predicate "const0_operand"
  (match_code "const_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST0_RTX (mode);
})

;; Match exactly one.
(define_predicate "const1_operand"
  (and (match_code "const_int")
       (match_test "op == const1_rtx")))

;; Match exactly eight.
(define_predicate "const8_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 8")))

;; Match 2, 4, or 8.  Used for leal multiplicands.
(define_predicate "const248_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 2 || i == 4 || i == 8;
})
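;; For illustration, const248_operand describes the scale in a scaled-index
;; address of the kind lea computes; a sketch of such an address:
;;
;;   (plus:SI (mult:SI (reg:SI 1) (const_int 4))
;;            (reg:SI 2))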

;; Match 0 or 1.
(define_predicate "const_0_to_1_operand"
  (and (match_code "const_int")
       (match_test "op == const0_rtx || op == const1_rtx")))

;; Match 0 to 3.
(define_predicate "const_0_to_3_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 3")))

;; Match 0 to 7.
(define_predicate "const_0_to_7_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 7")))

;; Match 0 to 15.
(define_predicate "const_0_to_15_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 15")))

;; Match 0 to 63.
(define_predicate "const_0_to_63_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 63")))

;; Match 0 to 255.
(define_predicate "const_0_to_255_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 255")))

;; Match (0 to 255) * 8.
(define_predicate "const_0_to_255_mul_8_operand"
  (match_code "const_int")
{
  unsigned HOST_WIDE_INT val = INTVAL (op);
  return val <= 255*8 && val % 8 == 0;
})

;; Return nonzero if OP is a CONST_INT >= 1 and <= 31 (a valid operand
;; for shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_31_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) >= 1 && INTVAL (op) <= 31")))

;; Match 2 or 3.
(define_predicate "const_2_to_3_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 2 || INTVAL (op) == 3")))

;; Match 4 to 7.
(define_predicate "const_4_to_7_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) >= 4 && INTVAL (op) <= 7")))

;; Match exactly one bit in a 4-bit mask.
(define_predicate "const_pow2_1_to_8_operand"
  (match_code "const_int")
{
  unsigned int log = exact_log2 (INTVAL (op));
  return log <= 3;
})

;; Match exactly one bit in an 8-bit mask.
(define_predicate "const_pow2_1_to_128_operand"
  (match_code "const_int")
{
  unsigned int log = exact_log2 (INTVAL (op));
  return log <= 7;
})

;; True if this is a constant appropriate for an increment or decrement.
(define_predicate "incdec_operand"
  (match_code "const_int")
{
  /* On Pentium4, the inc and dec operations cause an extra dependency on
     the flags register, since the carry flag is not set.  */
  if ((TARGET_PENTIUM4 || TARGET_NOCONA) && !optimize_size)
    return 0;
  return op == const1_rtx || op == constm1_rtx;
})

;; True for registers, or 1 or -1.  Used to optimize double-word shifts.
(define_predicate "reg_or_pm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
            (match_test "op == const1_rtx || op == constm1_rtx"))))

;; True if OP is acceptable as an operand of the DImode shift expander.
(define_predicate "shiftdi_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

(define_predicate "ashldi_input_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "reg_or_pm1_operand")))

;; Return true if OP is a vector load from the constant pool with just
;; the first element nonzero.
(define_predicate "zero_extended_scalar_load_operand"
  (match_code "mem")
{
  unsigned n_elts;
  op = maybe_get_pool_constant (op);
  if (!op)
    return 0;
  if (GET_CODE (op) != CONST_VECTOR)
    return 0;
  n_elts =
    (GET_MODE_SIZE (GET_MODE (op)) /
     GET_MODE_SIZE (GET_MODE_INNER (GET_MODE (op))));
  for (n_elts--; n_elts > 0; n_elts--)
    {
      rtx elt = CONST_VECTOR_ELT (op, n_elts);
      if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
        return 0;
    }
  return 1;
})
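;; For illustration, the kind of pool constant the loop above accepts
;; (a sketch): only element 0 may be nonzero.
;;
;;   (const_vector:V4SI [(const_int 42) (const_int 0)
;;                       (const_int 0)  (const_int 0)])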

;; Return 1 when OP is an operand acceptable for a standard SSE move.
(define_predicate "vector_move_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "const0_operand")))

;; Return true if OP is a register or a zero.
(define_predicate "reg_or_0_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "const0_operand")))

;; Return true if op is a valid address and does not contain
;; a segment override.
(define_special_predicate "no_seg_address_operand"
  (match_operand 0 "address_operand")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  return parts.seg == SEG_DEFAULT;
})

;; Return nonzero if the rtx is known to be at least 32 bits aligned.
(define_predicate "aligned_operand"
  (match_operand 0 "general_operand")
{
  struct ix86_address parts;
  int ok;

  /* Registers and immediate operands are always "aligned".  */
  if (GET_CODE (op) != MEM)
    return 1;

  /* Don't even try to do any aligned optimizations with volatiles.  */
  if (MEM_VOLATILE_P (op))
    return 0;

  if (MEM_ALIGN (op) >= 32)
    return 1;

  op = XEXP (op, 0);

  /* Pushes and pops are only valid on the stack pointer.  */
  if (GET_CODE (op) == PRE_DEC
      || GET_CODE (op) == POST_INC)
    return 1;

  /* Decode the address.  */
  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  /* Look for some component that isn't known to be aligned.  */
  if (parts.index)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
        return 0;
    }
  if (parts.base)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
        return 0;
    }
  if (parts.disp)
    {
      if (GET_CODE (parts.disp) != CONST_INT
          || (INTVAL (parts.disp) & 3) != 0)
        return 0;
    }

  /* Didn't find one -- this must be an aligned address.  */
  return 1;
})

;; Returns 1 if OP is a memory operand with a displacement.
(define_predicate "memory_displacement_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);
  return parts.disp != NULL_RTX;
})

;; Returns 1 if OP is a memory operand that cannot be represented
;; by the modRM array.
(define_predicate "long_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "memory_address_length (op) != 0")))

;; Return 1 if OP is a comparison operator that can be issued by fcmov.
(define_predicate "fcmov_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      enum rtx_code second_code, bypass_code;
      ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
      if (bypass_code != UNKNOWN || second_code != UNKNOWN)
        return 0;
      code = ix86_fp_compare_code_to_integer (code);
    }
  /* The i387 supports just a limited set of condition codes.  */
  switch (code)
    {
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode)
        return 1;
      return 0;
    case ORDERED: case UNORDERED:
    case EQ: case NE:
      return 1;
    default:
      return 0;
    }
})

;; Return 1 if OP is a comparison that can be used in the CMPSS/CMPPS insns.
;; The first set are supported directly; the second set can't be done with
;; full IEEE support, i.e. with NaNs.
;;
;; ??? It would seem that we have a lot of uses of this predicate that pass
;; it the wrong mode.  We got away with this because the old function didn't
;; check the mode at all.  Mirror that for now by calling this a special
;; predicate.

(define_special_predicate "sse_comparison_operator"
  (match_code "eq,lt,le,unordered,ne,unge,ungt,ordered"))

;; Return 1 if OP is a valid comparison operator in a valid mode.
(define_predicate "ix86_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      enum rtx_code second_code, bypass_code;
      ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
      return (bypass_code == UNKNOWN && second_code == UNKNOWN);
    }
  switch (code)
    {
    case EQ: case NE:
      return 1;
    case LT: case GE:
      if (inmode == CCmode || inmode == CCGCmode
          || inmode == CCGOCmode || inmode == CCNOmode)
        return 1;
      return 0;
    case LTU: case GTU: case LEU: case ORDERED: case UNORDERED: case GEU:
      if (inmode == CCmode)
        return 1;
      return 0;
    case GT: case LE:
      if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
        return 1;
      return 0;
    default:
      return 0;
    }
})

;; Return 1 if OP is a valid comparison operator that tests whether the
;; carry flag is set.
(define_predicate "ix86_carry_flag_operator"
  (match_code "ltu,lt,unlt,gt,ungt,le,unle,ge,unge,ltgt,uneq")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (GET_CODE (XEXP (op, 0)) != REG
      || REGNO (XEXP (op, 0)) != FLAGS_REG
      || XEXP (op, 1) != const0_rtx)
    return 0;

  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      enum rtx_code second_code, bypass_code;
      ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
      if (bypass_code != UNKNOWN || second_code != UNKNOWN)
        return 0;
      code = ix86_fp_compare_code_to_integer (code);
    }
  else if (inmode != CCmode)
    return 0;

  return code == LTU;
})

;; Nearly general operand, but accept any const_double, since we wish
;; to be able to drop them into memory rather than have them get pulled
;; into registers.
(define_predicate "cmp_fp_expander_operand"
  (ior (match_code "const_double")
       (match_operand 0 "general_operand")))

;; Return true if this is a valid binary floating-point operation.
(define_predicate "binary_fp_operator"
  (match_code "plus,minus,mult,div"))

;; Return true if this is a multiply operation.
(define_predicate "mult_operator"
  (match_code "mult"))

;; Return true if this is a division operation.
(define_predicate "div_operator"
  (match_code "div"))

;; Return true if this is a float extend operation.
(define_predicate "float_operator"
  (match_code "float"))

;; Return true for ARITHMETIC_P.
(define_predicate "arith_or_logical_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
               mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))

;; Return 1 if OP is a binary operator that can be promoted to a wider mode.
;; Modern CPUs have the same latency for HImode and SImode multiply,
;; but the 386 and 486 do HImode multiply faster.
(define_predicate "promotable_binary_operator"
  (ior (match_code "plus,and,ior,xor,ashift")
       (and (match_code "mult")
            (match_test "ix86_tune > PROCESSOR_I486"))))

;; To avoid problems when jump re-emits comparisons like testqi_ext_ccno_0,
;; re-recognize the operand to avoid a copy_to_mode_reg that will fail.
;;
;; ??? It seems likely that this will only work because cmpsi is an
;; expander, and no actual insns use this.

(define_predicate "cmpsi_operand_1"
  (match_code "and")
{
  return (GET_MODE (op) == SImode
          && GET_CODE (XEXP (op, 0)) == ZERO_EXTRACT
          && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT
          && GET_CODE (XEXP (XEXP (op, 0), 2)) == CONST_INT
          && INTVAL (XEXP (XEXP (op, 0), 1)) == 8
          && INTVAL (XEXP (XEXP (op, 0), 2)) == 8
          && GET_CODE (XEXP (op, 1)) == CONST_INT);
})
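;; For illustration, a sketch of the shape cmpsi_operand_1 re-recognizes,
;; i.e. an AND of a byte-sized zero_extract at bit position 8 with a
;; constant mask:
;;
;;   (and:SI (zero_extract:SI (reg:SI 1) (const_int 8) (const_int 8))
;;           (const_int 255))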

(define_predicate "cmpsi_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "cmpsi_operand_1")))

(define_predicate "compare_operator"
  (match_code "compare"))

(define_predicate "absneg_operator"
  (match_code "abs,neg"))
