;; Machine description for ARM processor synchronization primitives.
;; Copyright (C) 2010, 2012 Free Software Foundation, Inc.
;; Written by Marcus Shawcroft (marcus.shawcroft@arm.com)
;; 64bit Atomics by Dave Gilbert (david.gilbert@linaro.org)
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify it
;; under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful, but
;; WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
;; General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.  */

(define_mode_attr sync_predtab
  [(QI "TARGET_HAVE_LDREXBH && TARGET_HAVE_MEMORY_BARRIER")
   (HI "TARGET_HAVE_LDREXBH && TARGET_HAVE_MEMORY_BARRIER")
   (SI "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER")
   (DI "TARGET_HAVE_LDREXD && ARM_DOUBLEWORD_ALIGN
        && TARGET_HAVE_MEMORY_BARRIER")])

(define_code_iterator syncop [plus minus ior xor and])

(define_code_attr sync_optab
  [(ior "ior") (xor "xor") (and "and") (plus "add") (minus "sub")])

(define_mode_attr sync_sfx
  [(QI "b") (HI "h") (SI "") (DI "d")])

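;; The mode iterators used below (QHSD = QI/HI/SI/DI, NARROW = QI/HI,
;; SIDI = SI/DI) are defined in iterators.md.  Together with the "syncop"
;; code iterator and the attributes above, each template expands into one
;; pattern per mode and operation.  An illustrative expansion, assuming
;; SImode and plus: "atomic_<sync_optab><mode>" becomes the standard pattern
;; name "atomic_addsi", "<sync_predtab>" becomes
;; "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER", and "<sync_sfx>"
;; selects the empty suffix, so "ldrex<sync_sfx>" prints as plain "ldrex".
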
(define_expand "memory_barrier"
  [(set (match_dup 0)
        (unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))]
  "TARGET_HAVE_MEMORY_BARRIER"
{
  operands[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
  MEM_VOLATILE_P (operands[0]) = 1;
})

(define_insn "*memory_barrier"
  [(set (match_operand:BLK 0 "" "")
        (unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))]
  "TARGET_HAVE_MEMORY_BARRIER"
  {
    if (TARGET_HAVE_DMB)
      {
        /* Note we issue a system level barrier. We should consider issuing
           an inner shareability zone barrier here instead, i.e. "DMB ISH".  */
        /* ??? Differentiate based on SEQ_CST vs less strict?  */
        return "dmb\tsy";
      }

    if (TARGET_HAVE_DMB_MCR)
      return "mcr\tp15, 0, r0, c7, c10, 5";

    gcc_unreachable ();
  }
  [(set_attr "length" "4")
   (set_attr "conds" "unconditional")
   (set_attr "predicable" "no")])

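;; For reference: "memory_barrier" is the standard full-fence pattern the
;; middle end uses for __sync_synchronize () and similar full barriers.  On
;; an ARMv7-class core (TARGET_HAVE_DMB) it emits "dmb sy"; on ARMv6 it falls
;; back to the CP15 MCR form above.  Illustrative C, assuming an ARMv7
;; target:
;;
;;   void fence (void) { __sync_synchronize (); }   /* -> dmb sy */
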
(define_expand "atomic_compare_and_swap<mode>"
  [(match_operand:SI 0 "s_register_operand" "")         ;; bool out
   (match_operand:QHSD 1 "s_register_operand" "")       ;; val out
   (match_operand:QHSD 2 "mem_noofs_operand" "")        ;; memory
   (match_operand:QHSD 3 "general_operand" "")          ;; expected
   (match_operand:QHSD 4 "s_register_operand" "")       ;; desired
   (match_operand:SI 5 "const_int_operand")             ;; is_weak
   (match_operand:SI 6 "const_int_operand")             ;; mod_s
   (match_operand:SI 7 "const_int_operand")]            ;; mod_f
  "<sync_predtab>"
{
  arm_expand_compare_and_swap (operands);
  DONE;
})

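;; For reference: this is the standard "atomic_compare_and_swap<mode>"
;; expander behind the __atomic_compare_exchange_n built-in (and the legacy
;; __sync_*_compare_and_swap built-ins); arm_expand_compare_and_swap, in
;; arm.c, chooses between the two insn_and_split variants that follow.
;; Illustrative C, with a hypothetical variable "lock":
;;
;;   int expected = 0;
;;   __atomic_compare_exchange_n (&lock, &expected, 1, /*weak=*/0,
;;                                __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
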
(define_insn_and_split "atomic_compare_and_swap<mode>_1"
  [(set (reg:CC_Z CC_REGNUM)                                    ;; bool out
        (unspec_volatile:CC_Z [(const_int 0)] VUNSPEC_ATOMIC_CAS))
   (set (match_operand:SI 0 "s_register_operand" "=&r")        ;; val out
        (zero_extend:SI
          (match_operand:NARROW 1 "mem_noofs_operand" "+Ua")))  ;; memory
   (set (match_dup 1)
        (unspec_volatile:NARROW
          [(match_operand:SI 2 "arm_add_operand" "rIL")         ;; expected
           (match_operand:NARROW 3 "s_register_operand" "r")    ;; desired
           (match_operand:SI 4 "const_int_operand")             ;; is_weak
           (match_operand:SI 5 "const_int_operand")             ;; mod_s
           (match_operand:SI 6 "const_int_operand")]            ;; mod_f
          VUNSPEC_ATOMIC_CAS))
   (clobber (match_scratch:SI 7 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_compare_and_swap (operands);
    DONE;
  })

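;; After reload this splits (via arm_split_compare_and_swap) into an
;; exclusive-access retry loop built from the arm_load_exclusive* and
;; arm_store_exclusive* patterns at the end of this file.  Roughly, for
;; QImode (register choice, labels and scheduling are illustrative only):
;;
;;   .Lretry:
;;     ldrexb  r0, [r1]          ; load-exclusive, zero-extended old value
;;     cmp     r0, r2            ; compare with expected
;;     bne     .Ldone            ; mismatch: fail without storing
;;     strexb  r3, r4, [r1]      ; try to store the desired value
;;     cmp     r3, #0            ; 0 = store succeeded
;;     bne     .Lretry           ; reservation lost: try again
;;   .Ldone:
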
(define_mode_attr cas_cmp_operand
  [(SI "arm_add_operand") (DI "cmpdi_operand")])
(define_mode_attr cas_cmp_str
  [(SI "rIL") (DI "rDi")])

(define_insn_and_split "atomic_compare_and_swap<mode>_1"
  [(set (reg:CC_Z CC_REGNUM)                                    ;; bool out
        (unspec_volatile:CC_Z [(const_int 0)] VUNSPEC_ATOMIC_CAS))
   (set (match_operand:SIDI 0 "s_register_operand" "=&r")      ;; val out
        (match_operand:SIDI 1 "mem_noofs_operand" "+Ua"))       ;; memory
   (set (match_dup 1)
        (unspec_volatile:SIDI
          [(match_operand:SIDI 2 "<cas_cmp_operand>" "<cas_cmp_str>") ;; expect
           (match_operand:SIDI 3 "s_register_operand" "r")      ;; desired
           (match_operand:SI 4 "const_int_operand")             ;; is_weak
           (match_operand:SI 5 "const_int_operand")             ;; mod_s
           (match_operand:SI 6 "const_int_operand")]            ;; mod_f
          VUNSPEC_ATOMIC_CAS))
   (clobber (match_scratch:SI 7 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_compare_and_swap (operands);
    DONE;
  })

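;; The SI/DI variant has the same shape; for DImode the split uses
;; ldrexd/strexd, which in ARM mode need an even/odd consecutive register
;; pair, as noted in the arm_load_exclusivedi and arm_store_exclusive<mode>
;; patterns below.
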
(define_insn_and_split "atomic_exchange<mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r")      ;; output
        (match_operand:QHSD 1 "mem_noofs_operand" "+Ua"))       ;; memory
   (set (match_dup 1)
        (unspec_volatile:QHSD
          [(match_operand:QHSD 2 "s_register_operand" "r")      ;; input
           (match_operand:SI 3 "const_int_operand" "")]         ;; model
          VUNSPEC_ATOMIC_XCHG))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:SI 4 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (SET, operands[0], NULL, operands[1],
                         operands[2], operands[3], operands[4]);
    DONE;
  })

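;; For reference: "atomic_exchange<mode>" backs __atomic_exchange_n (and the
;; legacy __sync_lock_test_and_set).  The SET code asks arm_split_atomic_op
;; to store the new value unconditionally inside the ldrex/strex loop while
;; returning the old value in operand 0.
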
(define_mode_attr atomic_op_operand
  [(QI "reg_or_int_operand")
   (HI "reg_or_int_operand")
   (SI "reg_or_int_operand")
   (DI "s_register_operand")])

(define_mode_attr atomic_op_str
  [(QI "rn") (HI "rn") (SI "rn") (DI "r")])

(define_insn_and_split "atomic_<sync_optab><mode>"
  [(set (match_operand:QHSD 0 "mem_noofs_operand" "+Ua")
        (unspec_volatile:QHSD
          [(syncop:QHSD (match_dup 0)
             (match_operand:QHSD 1 "<atomic_op_operand>" "<atomic_op_str>"))
           (match_operand:SI 2 "const_int_operand")]            ;; model
          VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:QHSD 3 "=&r"))
   (clobber (match_scratch:SI 4 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (<CODE>, NULL, operands[3], operands[0],
                         operands[1], operands[2], operands[4]);
    DONE;
  })

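;; For reference: this form has no value output and is used when the result
;; of an __atomic_fetch_<op> call is ignored.  With <sync_optab> = "add" and
;; SImode, the post-reload split produces, roughly (registers illustrative,
;; plus whatever leading/trailing dmb the memory-model operand requires):
;;
;;   .Lretry:
;;     ldrex   r3, [r0]
;;     add     r3, r3, r1
;;     strex   r2, r3, [r0]
;;     cmp     r2, #0
;;     bne     .Lretry
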
(define_insn_and_split "atomic_nand<mode>"
  [(set (match_operand:QHSD 0 "mem_noofs_operand" "+Ua")
        (unspec_volatile:QHSD
          [(not:QHSD
             (and:QHSD (match_dup 0)
               (match_operand:QHSD 1 "<atomic_op_operand>" "<atomic_op_str>")))
           (match_operand:SI 2 "const_int_operand")]            ;; model
          VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:QHSD 3 "=&r"))
   (clobber (match_scratch:SI 4 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (NOT, NULL, operands[3], operands[0],
                         operands[1], operands[2], operands[4]);
    DONE;
  })

(define_insn_and_split "atomic_fetch_<sync_optab><mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
        (match_operand:QHSD 1 "mem_noofs_operand" "+Ua"))
   (set (match_dup 1)
        (unspec_volatile:QHSD
          [(syncop:QHSD (match_dup 1)
             (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>"))
           (match_operand:SI 3 "const_int_operand")]            ;; model
          VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:QHSD 4 "=&r"))
   (clobber (match_scratch:SI 5 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (<CODE>, operands[0], operands[4], operands[1],
                         operands[2], operands[3], operands[5]);
    DONE;
  })

(define_insn_and_split "atomic_fetch_nand<mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
        (match_operand:QHSD 1 "mem_noofs_operand" "+Ua"))
   (set (match_dup 1)
        (unspec_volatile:QHSD
          [(not:QHSD
             (and:QHSD (match_dup 1)
               (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>")))
           (match_operand:SI 3 "const_int_operand")]            ;; model
          VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:QHSD 4 "=&r"))
   (clobber (match_scratch:SI 5 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (NOT, operands[0], operands[4], operands[1],
                         operands[2], operands[3], operands[5]);
    DONE;
  })

(define_insn_and_split "atomic_<sync_optab>_fetch<mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
        (syncop:QHSD
          (match_operand:QHSD 1 "mem_noofs_operand" "+Ua")
          (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>")))
   (set (match_dup 1)
        (unspec_volatile:QHSD
          [(match_dup 1) (match_dup 2)
           (match_operand:SI 3 "const_int_operand")]            ;; model
          VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:SI 4 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (<CODE>, NULL, operands[0], operands[1],
                         operands[2], operands[3], operands[4]);
    DONE;
  })

(define_insn_and_split "atomic_nand_fetch<mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
        (not:QHSD
          (and:QHSD
            (match_operand:QHSD 1 "mem_noofs_operand" "+Ua")
            (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>"))))
   (set (match_dup 1)
        (unspec_volatile:QHSD
          [(match_dup 1) (match_dup 2)
           (match_operand:SI 3 "const_int_operand")]            ;; model
          VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:SI 4 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (NOT, NULL, operands[0], operands[1],
                         operands[2], operands[3], operands[4]);
    DONE;
  })

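;; Summary of the three groups above: "atomic_<op><mode>" discards the
;; result, "atomic_fetch_<op><mode>" returns the value the memory held before
;; the operation (the old-value argument passed to arm_split_atomic_op is
;; operand 0), and "atomic_<op>_fetch<mode>" returns the value after the
;; operation (the new-value argument is operand 0).  The nand variants are
;; spelled out separately because nand is not a member of the syncop code
;; iterator.
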
(define_insn "arm_load_exclusive<mode>"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (zero_extend:SI
          (unspec_volatile:NARROW
            [(match_operand:NARROW 1 "mem_noofs_operand" "Ua")]
            VUNSPEC_LL)))]
  "TARGET_HAVE_LDREXBH"
  "ldrex<sync_sfx>%?\t%0, %C1"
  [(set_attr "predicable" "yes")])

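;; The load-exclusive patterns map onto LDREXB/LDREXH (with the result
;; zero-extended to SImode, matching the RTL above), LDREX and LDREXD.
;; mem_noofs_operand and the "Ua" constraint restrict the address to the
;; simple offset-free form these exclusive-access patterns print via %C1.
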
(define_insn "arm_load_exclusivesi"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (unspec_volatile:SI
          [(match_operand:SI 1 "mem_noofs_operand" "Ua")]
          VUNSPEC_LL))]
  "TARGET_HAVE_LDREX"
  "ldrex%?\t%0, %C1"
  [(set_attr "predicable" "yes")])

(define_insn "arm_load_exclusivedi"
  [(set (match_operand:DI 0 "s_register_operand" "=r")
        (unspec_volatile:DI
          [(match_operand:DI 1 "mem_noofs_operand" "Ua")]
          VUNSPEC_LL))]
  "TARGET_HAVE_LDREXD"
  {
    rtx target = operands[0];
    /* The restrictions on target registers in ARM mode are that the two
       registers are consecutive and the first one is even; Thumb is
       actually more flexible, but DI should give us this anyway.
       Note that the 1st register always gets the lowest word in memory.  */
    gcc_assert ((REGNO (target) & 1) == 0);
    operands[2] = gen_rtx_REG (SImode, REGNO (target) + 1);
    return "ldrexd%?\t%0, %2, %C1";
  }
  [(set_attr "predicable" "yes")])

(define_insn "arm_store_exclusive<mode>"
  [(set (match_operand:SI 0 "s_register_operand" "=&r")
        (unspec_volatile:SI [(const_int 0)] VUNSPEC_SC))
   (set (match_operand:QHSD 1 "mem_noofs_operand" "=Ua")
        (unspec_volatile:QHSD
          [(match_operand:QHSD 2 "s_register_operand" "r")]
          VUNSPEC_SC))]
  "<sync_predtab>"
  {
    if (<MODE>mode == DImode)
      {
        rtx value = operands[2];
        /* The restrictions on target registers in ARM mode are that the two
           registers are consecutive and the first one is even; Thumb is
           actually more flexible, but DI should give us this anyway.
           Note that the 1st register always gets the lowest word in memory.  */
        gcc_assert ((REGNO (value) & 1) == 0 || TARGET_THUMB2);
        operands[3] = gen_rtx_REG (SImode, REGNO (value) + 1);
        return "strexd%?\t%0, %2, %3, %C1";
      }
    return "strex<sync_sfx>%?\t%0, %2, %C1";
  }
  [(set_attr "predicable" "yes")])
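;; The STREX family writes a status value to operand 0: 0 if the store
;; succeeded, 1 if the exclusive reservation was lost.  The splitters above
;; test that result and branch back to the load-exclusive, giving the usual
;; retry loop; roughly (registers illustrative):
;;
;;     strex   r2, r3, [r0]
;;     cmp     r2, #0
;;     bne     .Lretry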
