1 /* Test __atomic routines for existence and proper execution on 8 byte
2 values with each valid memory model. */
4 /* { dg-require-effective-target sync_long_long_runtime } */
5 /* { dg-options "" } */
6 /* { dg-options "-march=pentium" { target { { i?86-*-* x86_64-*-* } && ia32 } } } */
8 /* Test the execution of the __atomic_*OP builtin routines for long long. */
/* Standard failure hook; presumably called on any mismatched result in the
   (not visible here) test bodies -- TODO confirm against the full file.  */
10 extern void abort(void);
/* v: the 8-byte value every __atomic_* call below operates on.
   count / res: operand and running expected value; they are assigned in
   code outside this chunk (the expected values 0..5 in the fetch_add
   fragment suggest count == 1 there -- NOTE(review): verify).  */
12 long long v, count, res;
/* All-bits-set pattern used as operand/expected value by the bitwise
   (and/nand/xor/or) fragments below.  */
13 const long long init = ~0;
15 /* The fetch_op routines return the original value before the operation. */
/* NOTE(review): the enclosing function header, braces, and the abort()
   bodies of these if-statements are missing from this chunk (original
   line numbers jump 15 -> 23).  Each condition checks that fetch_add
   returns the PRE-add value; the expected sequence 0..5 implies v
   starts at 0 and count == 1 here -- set outside this chunk, confirm.  */
23 if (__atomic_fetch_add (&v, count, __ATOMIC_RELAXED) != 0)
26 if (__atomic_fetch_add (&v, 1, __ATOMIC_CONSUME) != 1)
29 if (__atomic_fetch_add (&v, count, __ATOMIC_ACQUIRE) != 2)
32 if (__atomic_fetch_add (&v, 1, __ATOMIC_RELEASE) != 3)
35 if (__atomic_fetch_add (&v, count, __ATOMIC_ACQ_REL) != 4)
38 if (__atomic_fetch_add (&v, 1, __ATOMIC_SEQ_CST) != 5)
/* NOTE(review): function header and if-bodies missing from this chunk.
   fetch_sub must return the PRE-subtract value; `res--` on the right
   tracks the expected old value while v is walked back down, one memory
   model per call.  res is initialized outside this chunk -- confirm.  */
49 if (__atomic_fetch_sub (&v, count + 1, __ATOMIC_RELAXED) != res--)
52 if (__atomic_fetch_sub (&v, 1, __ATOMIC_CONSUME) != res--)
55 if (__atomic_fetch_sub (&v, count + 1, __ATOMIC_ACQUIRE) != res--)
58 if (__atomic_fetch_sub (&v, 1, __ATOMIC_RELEASE) != res--)
61 if (__atomic_fetch_sub (&v, count + 1, __ATOMIC_ACQ_REL) != res--)
64 if (__atomic_fetch_sub (&v, 1, __ATOMIC_SEQ_CST) != res--)
/* NOTE(review): function header, if-bodies, and the re-seeding stores of
   v between calls are missing from this chunk (line numbers 73..89 are
   non-contiguous).  Each call checks fetch_and returns the PRE-and
   value; init is ~0, so the alternating expectations only hold if v is
   reset to init/0 between some calls outside this view -- confirm.  */
73 if (__atomic_fetch_and (&v, 0, __ATOMIC_RELAXED) != init)
76 if (__atomic_fetch_and (&v, init, __ATOMIC_CONSUME) != 0)
79 if (__atomic_fetch_and (&v, 0, __ATOMIC_ACQUIRE) != 0)
83 if (__atomic_fetch_and (&v, init, __ATOMIC_RELEASE) != init)
86 if (__atomic_fetch_and (&v, 0, __ATOMIC_ACQ_REL) != init)
89 if (__atomic_fetch_and (&v, 0, __ATOMIC_SEQ_CST) != 0)
/* NOTE(review): function header and if-bodies missing from this chunk.
   fetch_nand returns the PRE-nand value (new v = ~(old & operand)).
   With init == ~0: nand with 0 flips v toward ~0, nand with init
   complements v -- the expected old values below follow that chain,
   assuming v's seed value is set outside this view (confirm).  */
98 if (__atomic_fetch_nand (&v, 0, __ATOMIC_RELAXED) != init)
101 if (__atomic_fetch_nand (&v, init, __ATOMIC_CONSUME) != init)
104 if (__atomic_fetch_nand (&v, 0, __ATOMIC_ACQUIRE) != 0 )
107 if (__atomic_fetch_nand (&v, init, __ATOMIC_RELEASE) != init)
110 if (__atomic_fetch_nand (&v, init, __ATOMIC_ACQ_REL) != 0)
113 if (__atomic_fetch_nand (&v, 0, __ATOMIC_SEQ_CST) != init)
/* NOTE(review): function header and if-bodies missing from this chunk.
   fetch_xor returns the PRE-xor value.  The expectations alternating
   init/0 are consistent with v seeded to init and count == 0 here
   (xor with count leaves v unchanged, xor with ~count == ~0 flips it)
   -- count's value is set outside this view, confirm.  */
123 if (__atomic_fetch_xor (&v, count, __ATOMIC_RELAXED) != init)
126 if (__atomic_fetch_xor (&v, ~count, __ATOMIC_CONSUME) != init)
129 if (__atomic_fetch_xor (&v, 0, __ATOMIC_ACQUIRE) != 0)
132 if (__atomic_fetch_xor (&v, ~count, __ATOMIC_RELEASE) != 0)
135 if (__atomic_fetch_xor (&v, 0, __ATOMIC_ACQ_REL) != init)
138 if (__atomic_fetch_xor (&v, ~count, __ATOMIC_SEQ_CST) != init)
/* NOTE(review): function header, if-bodies, and the updates of count
   between calls are missing from this chunk.  fetch_or returns the
   PRE-or value; the expected sequence 0,1,3,7,15,31 is one bit added
   per call, implying count is doubled (1,4,16,...) outside this view
   -- confirm against the full file.  */
148 if (__atomic_fetch_or (&v, count, __ATOMIC_RELAXED) != 0)
152 if (__atomic_fetch_or (&v, 2, __ATOMIC_CONSUME) != 1)
156 if (__atomic_fetch_or (&v, count, __ATOMIC_ACQUIRE) != 3)
160 if (__atomic_fetch_or (&v, 8, __ATOMIC_RELEASE) != 7)
164 if (__atomic_fetch_or (&v, count, __ATOMIC_ACQ_REL) != 15)
168 if (__atomic_fetch_or (&v, count, __ATOMIC_SEQ_CST) != 31)
172 /* The OP_fetch routines return the new value after the operation. */
/* NOTE(review): function header and if-bodies missing from this chunk.
   Mirror of the fetch_add fragment but checking the POST-add value
   (1..6); again consistent with v == 0 and count == 1 on entry, both
   established outside this view -- confirm.  */
180 if (__atomic_add_fetch (&v, count, __ATOMIC_RELAXED) != 1)
183 if (__atomic_add_fetch (&v, 1, __ATOMIC_CONSUME) != 2)
186 if (__atomic_add_fetch (&v, count, __ATOMIC_ACQUIRE) != 3)
189 if (__atomic_add_fetch (&v, 1, __ATOMIC_RELEASE) != 4)
192 if (__atomic_add_fetch (&v, count, __ATOMIC_ACQ_REL) != 5)
195 if (__atomic_add_fetch (&v, count, __ATOMIC_SEQ_CST) != 6)
/* NOTE(review): function header and if-bodies missing from this chunk.
   Mirror of the fetch_sub fragment but checking the POST-subtract
   value, hence pre-decrement `--res` on the right instead of the
   post-decrement used by the fetch_sub tests above.  */
206 if (__atomic_sub_fetch (&v, count + 1, __ATOMIC_RELAXED) != --res)
209 if (__atomic_sub_fetch (&v, 1, __ATOMIC_CONSUME) != --res)
212 if (__atomic_sub_fetch (&v, count + 1, __ATOMIC_ACQUIRE) != --res)
215 if (__atomic_sub_fetch (&v, 1, __ATOMIC_RELEASE) != --res)
218 if (__atomic_sub_fetch (&v, count + 1, __ATOMIC_ACQ_REL) != --res)
221 if (__atomic_sub_fetch (&v, count + 1, __ATOMIC_SEQ_CST) != --res)
/* NOTE(review): function header, if-bodies, and the re-seeding stores
   of v between calls are missing from this chunk.  Checks the POST-and
   value: and with 0 must yield 0; and with init (~0) must leave a
   v == init unchanged (v presumably reset to init before those calls
   outside this view -- confirm).  */
230 if (__atomic_and_fetch (&v, 0, __ATOMIC_RELAXED) != 0)
234 if (__atomic_and_fetch (&v, init, __ATOMIC_CONSUME) != init)
237 if (__atomic_and_fetch (&v, 0, __ATOMIC_ACQUIRE) != 0)
241 if (__atomic_and_fetch (&v, init, __ATOMIC_RELEASE) != init)
244 if (__atomic_and_fetch (&v, 0, __ATOMIC_ACQ_REL) != 0)
248 if (__atomic_and_fetch (&v, 0, __ATOMIC_SEQ_CST) != 0)
/* NOTE(review): function header and if-bodies missing from this chunk.
   Checks the POST-nand value (~(old & operand)): nand with 0 always
   yields ~0 == init; nand with init complements v, alternating the
   expected 0/init results below given v's seed outside this view.  */
257 if (__atomic_nand_fetch (&v, 0, __ATOMIC_RELAXED) != init)
260 if (__atomic_nand_fetch (&v, init, __ATOMIC_CONSUME) != 0)
263 if (__atomic_nand_fetch (&v, 0, __ATOMIC_ACQUIRE) != init)
266 if (__atomic_nand_fetch (&v, init, __ATOMIC_RELEASE) != 0)
269 if (__atomic_nand_fetch (&v, init, __ATOMIC_ACQ_REL) != init)
272 if (__atomic_nand_fetch (&v, 0, __ATOMIC_SEQ_CST) != init)
/* NOTE(review): function header and if-bodies missing from this chunk.
   Checks the POST-xor value; consistent with count == 0 here (xor with
   count is a no-op, xor with ~count == ~0 flips between init and 0)
   -- count's value at this point is set outside this view, confirm.  */
284 if (__atomic_xor_fetch (&v, count, __ATOMIC_RELAXED) != init)
287 if (__atomic_xor_fetch (&v, ~count, __ATOMIC_CONSUME) != 0)
290 if (__atomic_xor_fetch (&v, 0, __ATOMIC_ACQUIRE) != 0)
293 if (__atomic_xor_fetch (&v, ~count, __ATOMIC_RELEASE) != init)
296 if (__atomic_xor_fetch (&v, 0, __ATOMIC_ACQ_REL) != init)
299 if (__atomic_xor_fetch (&v, ~count, __ATOMIC_SEQ_CST) != 0)
/* NOTE(review): function header, if-bodies, and the updates of count
   between calls are missing from this chunk.  Checks the POST-or value;
   the expected sequence 1,3,7,15,31,63 accumulates one new bit per
   call, implying count takes 1,4,16,... outside this view -- confirm.  */
309 if (__atomic_or_fetch (&v, count, __ATOMIC_RELAXED) != 1)
313 if (__atomic_or_fetch (&v, 2, __ATOMIC_CONSUME) != 3)
317 if (__atomic_or_fetch (&v, count, __ATOMIC_ACQUIRE) != 7)
321 if (__atomic_or_fetch (&v, 8, __ATOMIC_RELEASE) != 15)
325 if (__atomic_or_fetch (&v, count, __ATOMIC_ACQ_REL) != 31)
329 if (__atomic_or_fetch (&v, count, __ATOMIC_SEQ_CST) != 63)
334 /* Test the OP routines with a result which isn't used. Use both variations
335    within each function. */
/* NOTE(review): function header and the checks of v between calls are
   missing from this chunk.  Exercises both add forms with the return
   value discarded, one memory model each -- the compiler may emit a
   plain RMW here; correctness of v is presumably verified by code
   outside this view.  */
343 __atomic_add_fetch (&v, count, __ATOMIC_RELAXED);
347 __atomic_fetch_add (&v, count, __ATOMIC_CONSUME);
351 __atomic_add_fetch (&v, 1 , __ATOMIC_ACQUIRE);
355 __atomic_fetch_add (&v, 1, __ATOMIC_RELEASE);
359 __atomic_add_fetch (&v, count, __ATOMIC_ACQ_REL);
363 __atomic_fetch_add (&v, count, __ATOMIC_SEQ_CST);
/* NOTE(review): function header and the checks of v between calls are
   missing from this chunk.  Both sub forms with the result discarded,
   one memory model each; v presumably validated outside this view.  */
375 __atomic_sub_fetch (&v, count + 1, __ATOMIC_RELAXED);
379 __atomic_fetch_sub (&v, count + 1, __ATOMIC_CONSUME);
383 __atomic_sub_fetch (&v, 1, __ATOMIC_ACQUIRE);
387 __atomic_fetch_sub (&v, 1, __ATOMIC_RELEASE);
391 __atomic_sub_fetch (&v, count + 1, __ATOMIC_ACQ_REL);
395 __atomic_fetch_sub (&v, count + 1, __ATOMIC_SEQ_CST);
/* NOTE(review): function header, the checks of v, and the re-seeding
   stores between calls are missing from this chunk (line numbers are
   non-contiguous).  Both and forms with the result discarded.  */
405 __atomic_and_fetch (&v, 0, __ATOMIC_RELAXED);
410 __atomic_fetch_and (&v, init, __ATOMIC_CONSUME);
414 __atomic_and_fetch (&v, 0, __ATOMIC_ACQUIRE);
419 __atomic_fetch_and (&v, init, __ATOMIC_RELEASE);
423 __atomic_and_fetch (&v, 0, __ATOMIC_ACQ_REL);
428 __atomic_fetch_and (&v, 0, __ATOMIC_SEQ_CST);
/* NOTE(review): function header and the checks of v between calls are
   missing from this chunk.  Both nand forms with the result discarded,
   one memory model each.  */
438 __atomic_fetch_nand (&v, 0, __ATOMIC_RELAXED);
442 __atomic_fetch_nand (&v, init, __ATOMIC_CONSUME);
446 __atomic_nand_fetch (&v, 0, __ATOMIC_ACQUIRE);
450 __atomic_nand_fetch (&v, init, __ATOMIC_RELEASE);
454 __atomic_fetch_nand (&v, init, __ATOMIC_ACQ_REL);
458 __atomic_nand_fetch (&v, 0, __ATOMIC_SEQ_CST);
/* NOTE(review): function header and the checks of v between calls are
   missing from this chunk.  Both xor forms with the result discarded,
   one memory model each.  */
471 __atomic_xor_fetch (&v, count, __ATOMIC_RELAXED);
475 __atomic_fetch_xor (&v, ~count, __ATOMIC_CONSUME);
479 __atomic_xor_fetch (&v, 0, __ATOMIC_ACQUIRE);
483 __atomic_fetch_xor (&v, ~count, __ATOMIC_RELEASE);
487 __atomic_fetch_xor (&v, 0, __ATOMIC_ACQ_REL);
491 __atomic_xor_fetch (&v, ~count, __ATOMIC_SEQ_CST);
/* NOTE(review): function header, the checks of v, and any updates of
   count between calls are missing from this chunk.  Both or forms with
   the result discarded, one memory model each.  */
502 __atomic_or_fetch (&v, count, __ATOMIC_RELAXED);
507 __atomic_fetch_or (&v, count, __ATOMIC_CONSUME);
512 __atomic_or_fetch (&v, 4, __ATOMIC_ACQUIRE);
517 __atomic_fetch_or (&v, 8, __ATOMIC_RELEASE);
522 __atomic_or_fetch (&v, count, __ATOMIC_ACQ_REL);
527 __atomic_fetch_or (&v, count, __ATOMIC_SEQ_CST);