OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

openrisc/trunk/gnu-old/gcc-4.2.2/gcc/config/i386/predicates.md (rev 868)

;; Predicate definitions for IA-32 and x86-64.
;; Copyright (C) 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

;; Return nonzero if OP is either an i387 or SSE fp register.
(define_predicate "any_fp_register_operand"
  (and (match_code "reg")
       (match_test "ANY_FP_REGNO_P (REGNO (op))")))
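
;; [Editorial sketch -- not taken from i386.md; all names below are
;; hypothetical.]  A predicate defined in this file is consumed by name
;; from the match_operand expressions of insn patterns: the predicate
;; decides which rtl the pattern matches, while the constraint string
;; ("=f"/"f" here) only guides register allocation afterwards.  Shown
;; commented out, since it is only an illustration:
;;
;; (define_insn "*sketch_fp_move"
;;   [(set (match_operand:DF 0 "any_fp_register_operand" "=f")
;;         (match_operand:DF 1 "any_fp_register_operand" "f"))]
;;   ""
;;   "; assembler template omitted in this sketch")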

;; Return nonzero if OP is an i387 fp register.
(define_predicate "fp_register_operand"
  (and (match_code "reg")
       (match_test "FP_REGNO_P (REGNO (op))")))

;; Return nonzero if OP is a non-fp register_operand.
(define_predicate "register_and_not_any_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "ANY_FP_REGNO_P (REGNO (op))"))))

;; Return nonzero if OP is a register operand other than an i387 fp register.
(define_predicate "register_and_not_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "FP_REGNO_P (REGNO (op))"))))

;; True if the operand is an MMX register.
(define_predicate "mmx_reg_operand"
  (and (match_code "reg")
       (match_test "MMX_REGNO_P (REGNO (op))")))

;; True if the operand is a Q_REGS class register.
(define_predicate "q_regs_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return ANY_QI_REG_P (op);
})

;; Return true if op is a NON_Q_REGS class register.
(define_predicate "non_q_regs_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return NON_QI_REG_P (op);
})

;; Match an SI or HImode register for a zero_extract.
(define_special_predicate "ext_register_operand"
  (match_operand 0 "register_operand")
{
  if ((!TARGET_64BIT || GET_MODE (op) != DImode)
      && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
    return 0;
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* Be careful to accept only registers having upper parts.  */
  return REGNO (op) > LAST_VIRTUAL_REGISTER || REGNO (op) < 4;
})
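
;; [Editorial note.]  In this backend hard registers 0-3 are %ax, %dx, %cx
;; and %bx -- the only registers whose high byte (%ah, %dh, %ch, %bh) is
;; separately addressable, which is what a zero_extract of bits 8..15
;; needs.  REGNO (op) > LAST_VIRTUAL_REGISTER accepts pseudo registers as
;; well, since the register allocator can still place them in one of those
;; four registers.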

;; Return true if op is the AX register.
(define_predicate "ax_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == 0")))

;; Return true if op is the flags register.
(define_predicate "flags_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == FLAGS_REG")))

;; Return 1 if VALUE can be stored in a sign extended immediate field.
(define_predicate "x86_64_immediate_operand"
  (match_code "const_int,symbol_ref,label_ref,const")
{
  if (!TARGET_64BIT)
    return immediate_operand (op, mode);

  switch (GET_CODE (op))
    {
    case CONST_INT:
      /* CONST_DOUBLEs never match, since HOST_BITS_PER_WIDE_INT is known
         to be at least 32 and thus all acceptable constants are
         represented as CONST_INT.  */
      if (HOST_BITS_PER_WIDE_INT == 32)
        return 1;
      else
        {
          HOST_WIDE_INT val = trunc_int_for_mode (INTVAL (op), DImode);
          return trunc_int_for_mode (val, SImode) == val;
        }
      break;

    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit.
         In the CM_SMALL_PIC model we know it fits if it is local to the
         shared library.  Don't count TLS SYMBOL_REFs here, since they
         should fit only if inside of an UNSPEC handled below.  */
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
        return false;
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
              || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
              || ix86_cmodel == CM_KERNEL);

    case CONST:
      /* We may also accept offsetted memory references in certain
         special cases.  */
      if (GET_CODE (XEXP (op, 0)) == UNSPEC)
        switch (XINT (XEXP (op, 0), 1))
          {
          case UNSPEC_GOTPCREL:
          case UNSPEC_DTPOFF:
          case UNSPEC_GOTNTPOFF:
          case UNSPEC_NTPOFF:
            return 1;
          default:
            break;
          }

      if (GET_CODE (XEXP (op, 0)) == PLUS)
        {
          rtx op1 = XEXP (XEXP (op, 0), 0);
          rtx op2 = XEXP (XEXP (op, 0), 1);
          HOST_WIDE_INT offset;

          if (ix86_cmodel == CM_LARGE)
            return 0;
          if (GET_CODE (op2) != CONST_INT)
            return 0;
          offset = trunc_int_for_mode (INTVAL (op2), DImode);
          switch (GET_CODE (op1))
            {
            case SYMBOL_REF:
              /* TLS symbols are not constant.  */
              if (SYMBOL_REF_TLS_MODEL (op1))
                return 0;
              /* For CM_SMALL assume that the latest object is 16MB before
                 the end of the 31-bit boundary.  We may also accept pretty
                 large negative constants, knowing that all objects are
                 in the positive half of the address space.  */
              if ((ix86_cmodel == CM_SMALL
                   || (ix86_cmodel == CM_MEDIUM
                       && !SYMBOL_REF_FAR_ADDR_P (op1)))
                  && offset < 16*1024*1024
                  && trunc_int_for_mode (offset, SImode) == offset)
                return 1;
              /* For CM_KERNEL we know that all objects reside in the
                 negative half of the 32-bit address space.  We may not
                 accept negative offsets, since they may be just off,
                 but we may accept pretty large positive ones.  */
              if (ix86_cmodel == CM_KERNEL
                  && offset > 0
                  && trunc_int_for_mode (offset, SImode) == offset)
                return 1;
              break;

            case LABEL_REF:
              /* These conditions are similar to SYMBOL_REF ones, just the
                 constraints for code models differ.  */
              if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
                  && offset < 16*1024*1024
                  && trunc_int_for_mode (offset, SImode) == offset)
                return 1;
              if (ix86_cmodel == CM_KERNEL
                  && offset > 0
                  && trunc_int_for_mode (offset, SImode) == offset)
                return 1;
              break;

            case UNSPEC:
              switch (XINT (op1, 1))
                {
                case UNSPEC_DTPOFF:
                case UNSPEC_NTPOFF:
                  if (offset > 0
                      && trunc_int_for_mode (offset, SImode) == offset)
                    return 1;
                }
              break;

            default:
              break;
            }
        }
      break;

    default:
      gcc_unreachable ();
    }

  return 0;
})
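
;; [Editorial sketch -- hypothetical, not part of GCC.]  For a plain
;; CONST_INT the test above amounts to "the value is unchanged by
;; truncation to SImode", i.e. it is representable as a sign-extended
;; 32-bit immediate (assuming a 64-bit HOST_WIDE_INT host).  A
;; stripped-down predicate expressing just that check would be:
(define_predicate "sketch_sext32_const_int"
  (and (match_code "const_int")
       (match_test "trunc_int_for_mode (INTVAL (op), SImode) == INTVAL (op)")))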

;; Return 1 if VALUE can be stored in the zero extended immediate field.
(define_predicate "x86_64_zext_immediate_operand"
  (match_code "const_double,const_int,symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      if (HOST_BITS_PER_WIDE_INT == 32)
        return (GET_MODE (op) == VOIDmode && !CONST_DOUBLE_HIGH (op));
      else
        return 0;

    case CONST_INT:
      if (HOST_BITS_PER_WIDE_INT == 32)
        return INTVAL (op) >= 0;
      else
        return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);

    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit.  */
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
        return false;
      return (ix86_cmodel == CM_SMALL
              || (ix86_cmodel == CM_MEDIUM
                  && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;

    case CONST:
      /* We may also accept offsetted memory references in certain
         special cases.  */
      if (GET_CODE (XEXP (op, 0)) == PLUS)
        {
          rtx op1 = XEXP (XEXP (op, 0), 0);
          rtx op2 = XEXP (XEXP (op, 0), 1);

          if (ix86_cmodel == CM_LARGE)
            return 0;
          switch (GET_CODE (op1))
            {
            case SYMBOL_REF:
              /* TLS symbols are not constant.  */
              if (SYMBOL_REF_TLS_MODEL (op1))
                return 0;
              /* For the small code model we may accept pretty large positive
                 offsets, since one bit is available for free.  Negative
                 offsets are limited by the size of the NULL pointer area
                 specified by the ABI.  */
              if ((ix86_cmodel == CM_SMALL
                   || (ix86_cmodel == CM_MEDIUM
                       && !SYMBOL_REF_FAR_ADDR_P (op1)))
                  && GET_CODE (op2) == CONST_INT
                  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
                  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
                return 1;
              /* ??? For the kernel, we may accept adjustment of
                 -0x10000000, since we know that it will just convert
                 negative address space to positive, but perhaps this
                 is not worthwhile.  */
              break;

            case LABEL_REF:
              /* These conditions are similar to SYMBOL_REF ones, just the
                 constraints for code models differ.  */
              if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
                  && GET_CODE (op2) == CONST_INT
                  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
                  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
                return 1;
              break;

            default:
              return 0;
            }
        }
      break;

    default:
      gcc_unreachable ();
    }
  return 0;
})
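
;; [Editorial sketch -- hypothetical, not part of GCC.]  The CONST_INT case
;; above asks whether the value fits in an unsigned 32-bit immediate; on a
;; host with a 64-bit HOST_WIDE_INT that is simply "no bits set above bit
;; 31", which the following stripped-down predicate would express:
(define_predicate "sketch_zext32_const_int"
  (and (match_code "const_int")
       (match_test "(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff) == 0")))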

;; Return nonzero if OP is a general operand representable on x86_64.
(define_predicate "x86_64_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
         (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return nonzero if OP is a general operand representable on x86_64
;; as either a sign extended or zero extended constant.
(define_predicate "x86_64_szext_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
         (ior (match_operand 0 "x86_64_immediate_operand")
              (match_operand 0 "x86_64_zext_immediate_operand")))
    (match_operand 0 "general_operand")))

;; Return nonzero if OP is a nonmemory operand representable on x86_64.
(define_predicate "x86_64_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
         (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))

;; Return nonzero if OP is a nonmemory operand representable on x86_64,
;; accepting either a sign extended or zero extended constant.
(define_predicate "x86_64_szext_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
         (ior (match_operand 0 "x86_64_immediate_operand")
              (match_operand 0 "x86_64_zext_immediate_operand")))
    (match_operand 0 "nonmemory_operand")))

;; Return true when the operand is a PIC expression that can be computed
;; by the lea operation.
(define_predicate "pic_32bit_operand"
  (match_code "const,symbol_ref,label_ref")
{
  if (!flag_pic)
    return 0;
  /* Rule out relocations that translate into 64-bit constants.  */
  if (TARGET_64BIT && GET_CODE (op) == CONST)
    {
      op = XEXP (op, 0);
      if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
        op = XEXP (op, 0);
      if (GET_CODE (op) == UNSPEC
          && (XINT (op, 1) == UNSPEC_GOTOFF
              || XINT (op, 1) == UNSPEC_GOT))
        return 0;
    }
  return symbolic_operand (op, mode);
})


;; Return nonzero if OP is a nonmemory operand acceptable to the movabs
;; patterns.
(define_predicate "x86_64_movabs_operand"
  (if_then_else (match_test "!TARGET_64BIT || !flag_pic")
    (match_operand 0 "nonmemory_operand")
    (ior (match_operand 0 "register_operand")
         (and (match_operand 0 "const_double_operand")
              (match_test "GET_MODE_SIZE (mode) <= 8")))))

;; Returns nonzero if OP is either a symbol reference or a sum of a symbol
;; reference and a constant.
(define_predicate "symbolic_operand"
  (match_code "symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;

    case CONST:
      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
          || GET_CODE (op) == LABEL_REF
          || (GET_CODE (op) == UNSPEC
              && (XINT (op, 1) == UNSPEC_GOT
                  || XINT (op, 1) == UNSPEC_GOTOFF
                  || XINT (op, 1) == UNSPEC_GOTPCREL)))
        return 1;
      if (GET_CODE (op) != PLUS
          || GET_CODE (XEXP (op, 1)) != CONST_INT)
        return 0;

      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
          || GET_CODE (op) == LABEL_REF)
        return 1;
      /* Only @GOTOFF gets offsets.  */
      if (GET_CODE (op) != UNSPEC
          || XINT (op, 1) != UNSPEC_GOTOFF)
        return 0;

      op = XVECEXP (op, 0, 0);
      if (GET_CODE (op) == SYMBOL_REF
          || GET_CODE (op) == LABEL_REF)
        return 1;
      return 0;

    default:
      gcc_unreachable ();
    }
})

;; Return true if the operand contains a @GOT or @GOTOFF reference.
(define_predicate "pic_symbolic_operand"
  (match_code "const")
{
  op = XEXP (op, 0);
  if (TARGET_64BIT)
    {
      if (GET_CODE (op) == UNSPEC
          && XINT (op, 1) == UNSPEC_GOTPCREL)
        return 1;
      if (GET_CODE (op) == PLUS
          && GET_CODE (XEXP (op, 0)) == UNSPEC
          && XINT (XEXP (op, 0), 1) == UNSPEC_GOTPCREL)
        return 1;
    }
  else
    {
      if (GET_CODE (op) == UNSPEC)
        return 1;
      if (GET_CODE (op) != PLUS
          || GET_CODE (XEXP (op, 1)) != CONST_INT)
        return 0;
      op = XEXP (op, 0);
      if (GET_CODE (op) == UNSPEC)
        return 1;
    }
  return 0;
})

;; Return true if OP is a symbolic operand that resolves locally.
(define_predicate "local_symbolic_operand"
  (match_code "const,label_ref,symbol_ref")
{
  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
    op = XEXP (XEXP (op, 0), 0);

  if (GET_CODE (op) == LABEL_REF)
    return 1;

  if (GET_CODE (op) != SYMBOL_REF)
    return 0;

  if (SYMBOL_REF_TLS_MODEL (op) != 0)
    return 0;

  if (SYMBOL_REF_LOCAL_P (op))
    return 1;

  /* There is, however, a not insubstantial body of code in the rest of
     the compiler that assumes it can just stick the results of
     ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done.  */
  /* ??? This is a hack.  Should update the body of the compiler to
     always create a DECL and invoke targetm.encode_section_info.  */
  if (strncmp (XSTR (op, 0), internal_label_prefix,
               internal_label_prefix_len) == 0)
    return 1;

  return 0;
})

;; Test for various thread-local symbols.
(define_predicate "tls_symbolic_operand"
  (and (match_code "symbol_ref")
       (match_test "SYMBOL_REF_TLS_MODEL (op) != 0")))

(define_predicate "tls_modbase_operand"
  (and (match_code "symbol_ref")
       (match_test "op == ix86_tls_module_base ()")))

(define_predicate "tp_or_register_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "unspec")
            (match_test "XINT (op, 1) == UNSPEC_TP"))))

;; Test for a pc-relative call operand.
(define_predicate "constant_call_address_operand"
  (ior (match_code "symbol_ref")
       (match_operand 0 "local_symbolic_operand")))

;; True for any register that is neither virtual nor eliminable.  Used in
;; places where instantiation of such a register may cause the pattern not
;; to be recognized.
(define_predicate "register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return !(op == arg_pointer_rtx
           || op == frame_pointer_rtx
           || (REGNO (op) >= FIRST_PSEUDO_REGISTER
               && REGNO (op) <= LAST_VIRTUAL_REGISTER));
})

;; Similarly, but additionally reject the stack pointer; this is used to
;; prevent esp from being used as an index reg.
(define_predicate "index_register_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  if (reload_in_progress || reload_completed)
    return REG_OK_FOR_INDEX_STRICT_P (op);
  else
    return REG_OK_FOR_INDEX_NONSTRICT_P (op);
})

;; Return false if this is any eliminable register.  Otherwise general_operand.
(define_predicate "general_no_elim_operand"
  (if_then_else (match_code "reg,subreg")
    (match_operand 0 "register_no_elim_operand")
    (match_operand 0 "general_operand")))

;; Return false if this is any eliminable register.  Otherwise
;; register_operand or a constant.
(define_predicate "nonmemory_no_elim_operand"
  (ior (match_operand 0 "register_no_elim_operand")
       (match_operand 0 "immediate_operand")))

;; Test for a valid operand for a call instruction.
(define_predicate "call_insn_operand"
  (ior (match_operand 0 "constant_call_address_operand")
       (ior (match_operand 0 "register_no_elim_operand")
            (match_operand 0 "memory_operand"))))

;; Similarly, but for tail calls, in which we cannot allow memory references.
(define_predicate "sibcall_insn_operand"
  (ior (match_operand 0 "constant_call_address_operand")
       (match_operand 0 "register_no_elim_operand")))
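
;; [Editorial sketch -- hypothetical, not taken from i386.md; shown
;; commented out because the assembler template is only a placeholder.]
;; A call pattern would gate its address with call_insn_operand, so
;; symbolic, register and memory call targets are all accepted, while a
;; sibcall pattern would use sibcall_insn_operand instead:
;;
;; (define_insn "*sketch_call"
;;   [(call (mem:QI (match_operand:SI 0 "call_insn_operand" "rsm"))
;;          (match_operand 1 "" ""))]
;;   ""
;;   "; assembler template omitted in this sketch")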

;; Match exactly zero.
(define_predicate "const0_operand"
  (match_code "const_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST0_RTX (mode);
})

;; Match exactly one.
(define_predicate "const1_operand"
  (and (match_code "const_int")
       (match_test "op == const1_rtx")))

;; Match exactly eight.
(define_predicate "const8_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 8")))

;; Match 2, 4, or 8.  Used for leal multiplicands.
(define_predicate "const248_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 2 || i == 4 || i == 8;
})
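
;; [Editorial note.]  2, 4 and 8 are the scale factors the x86 addressing
;; modes can encode, so a lea-style pattern would use const248_operand for
;; the multiplier in an address such as
;;   (plus (mult (reg) (const_int 4)) (reg))
;; (a scale of 1 is simply written without the mult).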

;; Match 0 or 1.
(define_predicate "const_0_to_1_operand"
  (and (match_code "const_int")
       (match_test "op == const0_rtx || op == const1_rtx")))

;; Match 0 to 3.
(define_predicate "const_0_to_3_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 3")))

;; Match 0 to 7.
(define_predicate "const_0_to_7_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 7")))

;; Match 0 to 15.
(define_predicate "const_0_to_15_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 15")))

;; Match 0 to 63.
(define_predicate "const_0_to_63_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 63")))

;; Match 0 to 255.
(define_predicate "const_0_to_255_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 255")))

;; Match (0 to 255) * 8.
(define_predicate "const_0_to_255_mul_8_operand"
  (match_code "const_int")
{
  unsigned HOST_WIDE_INT val = INTVAL (op);
  return val <= 255*8 && val % 8 == 0;
})

;; Return nonzero if OP is a CONST_INT >= 1 and <= 31 (a valid operand
;; for shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_31_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) >= 1 && INTVAL (op) <= 31")))

;; Match 2 or 3.
(define_predicate "const_2_to_3_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 2 || INTVAL (op) == 3")))

;; Match 4 to 7.
(define_predicate "const_4_to_7_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) >= 4 && INTVAL (op) <= 7")))

;; Match exactly one bit in a 4-bit mask.
(define_predicate "const_pow2_1_to_8_operand"
  (match_code "const_int")
{
  unsigned int log = exact_log2 (INTVAL (op));
  return log <= 3;
})

;; Match exactly one bit in an 8-bit mask.
(define_predicate "const_pow2_1_to_128_operand"
  (match_code "const_int")
{
  unsigned int log = exact_log2 (INTVAL (op));
  return log <= 7;
})
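
;; [Editorial note.]  exact_log2 returns -1 when its argument is not an
;; exact power of two; assigning the result to an unsigned variable turns
;; that -1 into a huge value, so the "log <= 3" / "log <= 7" tests above
;; accept exactly 1, 2, 4, 8 (respectively 1 through 128).  A hypothetical
;; sketch of the 4-bit variant written out explicitly, for comparison:
(define_predicate "sketch_pow2_1_to_8"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 1 || INTVAL (op) == 2 || INTVAL (op) == 4 || INTVAL (op) == 8")))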

;; True if this is a constant appropriate for an increment or decrement.
(define_predicate "incdec_operand"
  (match_code "const_int")
{
  /* On the Pentium 4, the inc and dec operations cause an extra dependency
     on the flags register, since the carry flag is not set.  */
  if (!TARGET_USE_INCDEC && !optimize_size)
    return 0;
  return op == const1_rtx || op == constm1_rtx;
})

;; True for registers, or 1 or -1.  Used to optimize double-word shifts.
(define_predicate "reg_or_pm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
            (match_test "op == const1_rtx || op == constm1_rtx"))))

;; True if OP is acceptable as an operand of the DImode shift expander.
(define_predicate "shiftdi_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

(define_predicate "ashldi_input_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "reg_or_pm1_operand")))

;; Return true if OP is a vector load from the constant pool with just
;; the first element nonzero.
(define_predicate "zero_extended_scalar_load_operand"
  (match_code "mem")
{
  unsigned n_elts;
  op = maybe_get_pool_constant (op);
  if (!op)
    return 0;
  if (GET_CODE (op) != CONST_VECTOR)
    return 0;
  n_elts =
    (GET_MODE_SIZE (GET_MODE (op)) /
     GET_MODE_SIZE (GET_MODE_INNER (GET_MODE (op))));
  for (n_elts--; n_elts > 0; n_elts--)
    {
      rtx elt = CONST_VECTOR_ELT (op, n_elts);
      if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
        return 0;
    }
  return 1;
})

;; Return true if the operand is a vector constant that is all ones.
(define_predicate "vector_all_ones_operand"
  (match_code "const_vector")
{
  int nunits = GET_MODE_NUNITS (mode);

  if (GET_CODE (op) == CONST_VECTOR
      && CONST_VECTOR_NUNITS (op) == nunits)
    {
      int i;
      for (i = 0; i < nunits; ++i)
        {
          rtx x = CONST_VECTOR_ELT (op, i);
          if (x != constm1_rtx)
            return 0;
        }
      return 1;
    }

  return 0;
})

;; Return 1 when OP is an operand acceptable for a standard SSE move.
(define_predicate "vector_move_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "const0_operand")))

;; Return 1 when OP is a nonimmediate operand or a standard SSE constant.
(define_predicate "nonimmediate_or_sse_const_operand"
  (match_operand 0 "general_operand")
{
  if (nonimmediate_operand (op, mode))
    return 1;
  if (standard_sse_constant_p (op) > 0)
    return 1;
  return 0;
})

;; Return true if OP is a register or a zero.
(define_predicate "reg_or_0_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "const0_operand")))

;; Return true if op is a valid address that does not contain
;; a segment override.
(define_special_predicate "no_seg_address_operand"
  (match_operand 0 "address_operand")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  return parts.seg == SEG_DEFAULT;
})
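
;; [Editorial note.]  ix86_decompose_address splits an address into the
;; fields of struct ix86_address used here and in the predicates below:
;; base and index registers, a scale factor, a displacement (parts.disp)
;; and a segment (parts.seg), where SEG_DEFAULT means no segment override
;; prefix is involved.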
738
 
739
;; Return nonzero if the rtx is known to be at least 32 bits aligned.
740
(define_predicate "aligned_operand"
741
  (match_operand 0 "general_operand")
742
{
743
  struct ix86_address parts;
744
  int ok;
745
 
746
  /* Registers and immediate operands are always "aligned".  */
747
  if (GET_CODE (op) != MEM)
748
    return 1;
749
 
750
  /* All patterns using aligned_operand on memory operands ends up
751
     in promoting memory operand to 64bit and thus causing memory mismatch.  */
752
  if (TARGET_MEMORY_MISMATCH_STALL && !optimize_size)
753
    return 0;
754
 
755
  /* Don't even try to do any aligned optimizations with volatiles.  */
756
  if (MEM_VOLATILE_P (op))
757
    return 0;
758
 
759
  if (MEM_ALIGN (op) >= 32)
760
    return 1;
761
 
762
  op = XEXP (op, 0);
763
 
764
  /* Pushes and pops are only valid on the stack pointer.  */
765
  if (GET_CODE (op) == PRE_DEC
766
      || GET_CODE (op) == POST_INC)
767
    return 1;
768
 
769
  /* Decode the address.  */
770
  ok = ix86_decompose_address (op, &parts);
771
  gcc_assert (ok);
772
 
773
  /* Look for some component that isn't known to be aligned.  */
774
  if (parts.index)
775
    {
776
      if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
777
        return 0;
778
    }
779
  if (parts.base)
780
    {
781
      if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
782
        return 0;
783
    }
784
  if (parts.disp)
785
    {
786
      if (GET_CODE (parts.disp) != CONST_INT
787
          || (INTVAL (parts.disp) & 3) != 0)
788
        return 0;
789
    }
790
 
791
  /* Didn't find one -- this must be an aligned address.  */
792
  return 1;
793
})
794
 
795
;; Returns 1 if OP is memory operand with a displacement.
796
(define_predicate "memory_displacement_operand"
797
  (match_operand 0 "memory_operand")
798
{
799
  struct ix86_address parts;
800
  int ok;
801
 
802
  ok = ix86_decompose_address (XEXP (op, 0), &parts);
803
  gcc_assert (ok);
804
  return parts.disp != NULL_RTX;
805
})
806
 
807
;; Returns 1 if OP is memory operand with a displacement only.
808
(define_predicate "memory_displacement_only_operand"
809
  (match_operand 0 "memory_operand")
810
{
811
  struct ix86_address parts;
812
  int ok;
813
 
814
  ok = ix86_decompose_address (XEXP (op, 0), &parts);
815
  gcc_assert (ok);
816
 
817
  if (parts.base || parts.index)
818
    return 0;
819
 
820
  return parts.disp != NULL_RTX;
821
})
822
 
823
;; Returns 1 if OP is memory operand that cannot be represented
824
;; by the modRM array.
825
(define_predicate "long_memory_operand"
826
  (and (match_operand 0 "memory_operand")
827
       (match_test "memory_address_length (op) != 0")))
828
 
;; Return 1 if OP is a comparison operator that can be issued by fcmov.
(define_predicate "fcmov_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      enum rtx_code second_code, bypass_code;
      ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
      if (bypass_code != UNKNOWN || second_code != UNKNOWN)
        return 0;
      code = ix86_fp_compare_code_to_integer (code);
    }
  /* The i387 supports just a limited set of condition codes.  */
  switch (code)
    {
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode)
        return 1;
      return 0;
    case ORDERED: case UNORDERED:
    case EQ: case NE:
      return 1;
    default:
      return 0;
    }
})

;; Return 1 if OP is a comparison that can be used in the CMPSS/CMPPS insns.
;; The first set are supported directly; the second set can't be done with
;; full IEEE support, i.e. NaNs.
;;
;; ??? It would seem that we have a lot of uses of this predicate that pass
;; it the wrong mode.  We got away with this because the old function didn't
;; check the mode at all.  Mirror that for now by calling this a special
;; predicate.

(define_special_predicate "sse_comparison_operator"
  (match_code "eq,lt,le,unordered,ne,unge,ungt,ordered"))
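
;; [Editorial note.]  The eight codes above correspond, in order, to the
;; cmpss/cmpps immediate encodings 0-7 (EQ, LT, LE, UNORD, NEQ, NLT, NLE,
;; ORD): the first four are the directly supported comparisons, the last
;; four their negations, which is why they show up as ne/unge/ungt/ordered
;; on the rtl side.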

;; Return 1 if OP is a valid comparison operator in a valid mode.
(define_predicate "ix86_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      enum rtx_code second_code, bypass_code;
      ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
      return (bypass_code == UNKNOWN && second_code == UNKNOWN);
    }
  switch (code)
    {
    case EQ: case NE:
      return 1;
    case LT: case GE:
      if (inmode == CCmode || inmode == CCGCmode
          || inmode == CCGOCmode || inmode == CCNOmode)
        return 1;
      return 0;
    case LTU: case GTU: case LEU: case ORDERED: case UNORDERED: case GEU:
      if (inmode == CCmode)
        return 1;
      return 0;
    case GT: case LE:
      if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
        return 1;
      return 0;
    default:
      return 0;
    }
})

;; Return 1 if OP is a valid comparison operator testing the carry flag
;; to be set.
(define_predicate "ix86_carry_flag_operator"
  (match_code "ltu,lt,unlt,gt,ungt,le,unle,ge,unge,ltgt,uneq")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (GET_CODE (XEXP (op, 0)) != REG
      || REGNO (XEXP (op, 0)) != FLAGS_REG
      || XEXP (op, 1) != const0_rtx)
    return 0;

  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      enum rtx_code second_code, bypass_code;
      ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
      if (bypass_code != UNKNOWN || second_code != UNKNOWN)
        return 0;
      code = ix86_fp_compare_code_to_integer (code);
    }
  else if (inmode != CCmode)
    return 0;

  return code == LTU;
})

;; Nearly general operand, but accept any const_double, since we wish
;; to be able to drop them into memory rather than have them get pulled
;; into registers.
(define_predicate "cmp_fp_expander_operand"
  (ior (match_code "const_double")
       (match_operand 0 "general_operand")))

;; Return true if this is a valid binary floating-point operation.
(define_predicate "binary_fp_operator"
  (match_code "plus,minus,mult,div"))

;; Return true if this is a multiply operation.
(define_predicate "mult_operator"
  (match_code "mult"))

;; Return true if this is a division operation.
(define_predicate "div_operator"
  (match_code "div"))

;; Return true if this is a float extend operation.
(define_predicate "float_operator"
  (match_code "float"))

;; Return true for ARITHMETIC_P.
(define_predicate "arith_or_logical_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
               mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))

;; Return 1 if OP is a binary operator that can be promoted to a wider mode.
;; Modern CPUs have the same latency for HImode and SImode multiply,
;; but the 386 and 486 do HImode multiply faster.
(define_predicate "promotable_binary_operator"
  (ior (match_code "plus,and,ior,xor,ashift")
       (and (match_code "mult")
            (match_test "ix86_tune > PROCESSOR_I486"))))

;; To avoid problems when jump re-emits comparisons like testqi_ext_ccno_0,
;; re-recognize the operand to avoid a copy_to_mode_reg that will fail.
;;
;; ??? It seems likely that this will only work because cmpsi is an
;; expander, and no actual insns use this.

(define_predicate "cmpsi_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (and (match_code "and")
            (match_code "zero_extract" "0")
            (match_code "const_int"    "1")
            (match_code "const_int"    "01")
            (match_code "const_int"    "02")
            (match_test "INTVAL (XEXP (XEXP (op, 0), 1)) == 8")
            (match_test "INTVAL (XEXP (XEXP (op, 0), 2)) == 8")
       )))

(define_predicate "compare_operator"
  (match_code "compare"))

(define_predicate "absneg_operator"
  (match_code "abs,neg"))
