/* Linux-specific atomic operations for ARM EABI.
   Copyright (C) 2008, 2009 Free Software Foundation, Inc.
   Contributed by CodeSourcery.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   Under Section 7 of GPL version 3, you are granted additional
   permissions described in the GCC Runtime Library Exception, version
   3.1, as published by the Free Software Foundation.

   You should have received a copy of the GNU General Public License and
   a copy of the GCC Runtime Library Exception along with this program;
   see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
   <http://www.gnu.org/licenses/>.  */
/* Kernel helper for compare-and-exchange.
   NOTE(review): 0xffff0fc0 is the fixed address of the Linux ARM
   "kuser" cmpxchg helper in the vector page; per its usage below it
   returns 0 on success and non-zero when *ptr did not match oldval.  */
typedef int (__kernel_cmpxchg_t) (int oldval, int newval, int *ptr);
#define __kernel_cmpxchg (*(__kernel_cmpxchg_t *) 0xffff0fc0)

/* Kernel helper for memory barrier (kuser helper at a fixed address).  */
typedef void (__kernel_dmb_t) (void);
#define __kernel_dmb (*(__kernel_dmb_t *) 0xffff0fa0)
/* Note: we implement byte, short and int versions of atomic operations using
   the above kernel helpers, but there is no support for "long long" (64-bit)
   operations as yet.  */

#define HIDDEN __attribute__ ((visibility ("hidden")))

/* Shift adjustment needed to move a subword to the least-significant
   end of its containing word depends on byte order; __ARMEL__ is
   predefined when targeting little-endian ARM.  The garbled original
   defined both pairs unconditionally, which is a redefinition with a
   different value — the conditional is restored here.  */
#ifdef __ARMEL__
#define INVERT_MASK_1 0
#define INVERT_MASK_2 0
#else
#define INVERT_MASK_1 24
#define INVERT_MASK_2 16
#endif

/* Bit masks covering a byte and a halfword within a word.  */
#define MASK_1 0xffu
#define MASK_2 0xffffu
51 #define FETCH_AND_OP_WORD(OP, PFX_OP, INF_OP) \
53 __sync_fetch_and_##OP##_4 (int *ptr, int val) \
59 failure = __kernel_cmpxchg (tmp, PFX_OP tmp INF_OP val, ptr); \
60 } while (failure != 0); \
65 FETCH_AND_OP_WORD (add, , +)
66 FETCH_AND_OP_WORD (sub, , -)
67 FETCH_AND_OP_WORD (or, , |)
68 FETCH_AND_OP_WORD (and, , &)
69 FETCH_AND_OP_WORD (xor, , ^)
70 FETCH_AND_OP_WORD (nand, ~, &)
/* Name builders used by SUBWORD_SYNC_OP: the RETURN argument (`oldval'
   or `newval') is pasted to select which of these two is expanded,
   producing either the fetch-and-<op> or <op>-and-fetch symbol name.  */
#define NAME_oldval(OP, WIDTH) __sync_fetch_and_##OP##_##WIDTH
#define NAME_newval(OP, WIDTH) __sync_##OP##_and_fetch_##WIDTH

/* Implement both __sync_<op>_and_fetch and __sync_fetch_and_<op> for
   subword-sized quantities.  */
78 #define SUBWORD_SYNC_OP(OP, PFX_OP, INF_OP, TYPE, WIDTH, RETURN) \
80 NAME##_##RETURN (OP, WIDTH) (TYPE *ptr, TYPE val) \
82 int *wordptr = (int *) ((unsigned int) ptr & ~3); \
83 unsigned int mask, shift, oldval, newval; \
86 shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH; \
87 mask = MASK_##WIDTH << shift; \
91 newval = ((PFX_OP ((oldval & mask) >> shift) \
92 INF_OP (unsigned int) val) << shift) & mask; \
93 newval |= oldval & ~mask; \
94 failure = __kernel_cmpxchg (oldval, newval, wordptr); \
95 } while (failure != 0); \
97 return (RETURN & mask) >> shift; \
100 SUBWORD_SYNC_OP (add, , +, short, 2, oldval)
101 SUBWORD_SYNC_OP (sub, , -, short, 2, oldval)
102 SUBWORD_SYNC_OP (or, , |, short, 2, oldval)
103 SUBWORD_SYNC_OP (and, , &, short, 2, oldval)
104 SUBWORD_SYNC_OP (xor, , ^, short, 2, oldval)
105 SUBWORD_SYNC_OP (nand, ~, &, short, 2, oldval)
107 SUBWORD_SYNC_OP (add, , +, char, 1, oldval)
108 SUBWORD_SYNC_OP (sub, , -, char, 1, oldval)
109 SUBWORD_SYNC_OP (or, , |, char, 1, oldval)
110 SUBWORD_SYNC_OP (and, , &, char, 1, oldval)
111 SUBWORD_SYNC_OP (xor, , ^, char, 1, oldval)
112 SUBWORD_SYNC_OP (nand, ~, &, char, 1, oldval)
114 #define OP_AND_FETCH_WORD(OP, PFX_OP, INF_OP) \
116 __sync_##OP##_and_fetch_4 (int *ptr, int val) \
122 failure = __kernel_cmpxchg (tmp, PFX_OP tmp INF_OP val, ptr); \
123 } while (failure != 0); \
125 return PFX_OP tmp INF_OP val; \
128 OP_AND_FETCH_WORD (add, , +)
129 OP_AND_FETCH_WORD (sub, , -)
130 OP_AND_FETCH_WORD (or, , |)
131 OP_AND_FETCH_WORD (and, , &)
132 OP_AND_FETCH_WORD (xor, , ^)
133 OP_AND_FETCH_WORD (nand, ~, &)
135 SUBWORD_SYNC_OP (add, , +, short, 2, newval)
136 SUBWORD_SYNC_OP (sub, , -, short, 2, newval)
137 SUBWORD_SYNC_OP (or, , |, short, 2, newval)
138 SUBWORD_SYNC_OP (and, , &, short, 2, newval)
139 SUBWORD_SYNC_OP (xor, , ^, short, 2, newval)
140 SUBWORD_SYNC_OP (nand, ~, &, short, 2, newval)
142 SUBWORD_SYNC_OP (add, , +, char, 1, newval)
143 SUBWORD_SYNC_OP (sub, , -, char, 1, newval)
144 SUBWORD_SYNC_OP (or, , |, char, 1, newval)
145 SUBWORD_SYNC_OP (and, , &, char, 1, newval)
146 SUBWORD_SYNC_OP (xor, , ^, char, 1, newval)
147 SUBWORD_SYNC_OP (nand, ~, &, char, 1, newval)
150 __sync_val_compare_and_swap_4 (int *ptr, int oldval, int newval)
152 int actual_oldval, fail;
156 actual_oldval = *ptr;
158 if (oldval != actual_oldval)
159 return actual_oldval;
161 fail = __kernel_cmpxchg (actual_oldval, newval, ptr);
168 #define SUBWORD_VAL_CAS(TYPE, WIDTH) \
170 __sync_val_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval, \
173 int *wordptr = (int *)((unsigned int) ptr & ~3), fail; \
174 unsigned int mask, shift, actual_oldval, actual_newval; \
176 shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH; \
177 mask = MASK_##WIDTH << shift; \
181 actual_oldval = *wordptr; \
183 if (((actual_oldval & mask) >> shift) != (unsigned int) oldval) \
184 return (actual_oldval & mask) >> shift; \
186 actual_newval = (actual_oldval & ~mask) \
187 | (((unsigned int) newval << shift) & mask); \
189 fail = __kernel_cmpxchg (actual_oldval, actual_newval, \
197 SUBWORD_VAL_CAS (short, 2)
198 SUBWORD_VAL_CAS (char, 1)
200 typedef unsigned char bool;
203 __sync_bool_compare_and_swap_4 (int *ptr, int oldval, int newval)
205 int failure = __kernel_cmpxchg (oldval, newval, ptr);
206 return (failure == 0);
209 #define SUBWORD_BOOL_CAS(TYPE, WIDTH) \
211 __sync_bool_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval, \
215 = __sync_val_compare_and_swap_##WIDTH (ptr, oldval, newval); \
216 return (oldval == actual_oldval); \
219 SUBWORD_BOOL_CAS (short, 2)
220 SUBWORD_BOOL_CAS (char, 1)
223 __sync_synchronize (void)
229 __sync_lock_test_and_set_4 (int *ptr, int val)
235 failure = __kernel_cmpxchg (oldval, val, ptr);
236 } while (failure != 0);
241 #define SUBWORD_TEST_AND_SET(TYPE, WIDTH) \
243 __sync_lock_test_and_set_##WIDTH (TYPE *ptr, TYPE val) \
246 unsigned int oldval, newval, shift, mask; \
247 int *wordptr = (int *) ((unsigned int) ptr & ~3); \
249 shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH; \
250 mask = MASK_##WIDTH << shift; \
254 newval = (oldval & ~mask) \
255 | (((unsigned int) val << shift) & mask); \
256 failure = __kernel_cmpxchg (oldval, newval, wordptr); \
257 } while (failure != 0); \
259 return (oldval & mask) >> shift; \
262 SUBWORD_TEST_AND_SET (short, 2)
263 SUBWORD_TEST_AND_SET (char, 1)
265 #define SYNC_LOCK_RELEASE(TYPE, WIDTH) \
267 __sync_lock_release_##WIDTH (TYPE *ptr) \
269 /* All writes before this point must be seen before we release \
270 the lock itself. */ \
275 SYNC_LOCK_RELEASE (int, 4)
276 SYNC_LOCK_RELEASE (short, 2)
277 SYNC_LOCK_RELEASE (char, 1)