/* Test __atomic routines for existence and proper execution on 16 byte
   values with each valid memory model. */
/* { dg-require-effective-target sync_int_128 } */
/* { dg-options "-mcx16" { target { x86_64-*-* } } } */

/* Test the execution of the __atomic_*OP builtin routines for an int_128. */
extern void abort(void);

/* Shared state for all tests below.  `v` is the atomic target, `count`
   and `res` are scratch operands/expected values, and `init` is an
   all-ones 128-bit pattern used by the bitwise-op tests.  */
__int128_t v, count, res;
const __int128_t init = ~0;
14 /* The fetch_op routines return the original value before the operation. */
22 if (__atomic_fetch_add (&v, count, __ATOMIC_RELAXED) != 0)
25 if (__atomic_fetch_add (&v, 1, __ATOMIC_CONSUME) != 1)
28 if (__atomic_fetch_add (&v, count, __ATOMIC_ACQUIRE) != 2)
31 if (__atomic_fetch_add (&v, 1, __ATOMIC_RELEASE) != 3)
34 if (__atomic_fetch_add (&v, count, __ATOMIC_ACQ_REL) != 4)
37 if (__atomic_fetch_add (&v, 1, __ATOMIC_SEQ_CST) != 5)
48 if (__atomic_fetch_sub (&v, count + 1, __ATOMIC_RELAXED) != res--)
51 if (__atomic_fetch_sub (&v, 1, __ATOMIC_CONSUME) != res--)
54 if (__atomic_fetch_sub (&v, count + 1, __ATOMIC_ACQUIRE) != res--)
57 if (__atomic_fetch_sub (&v, 1, __ATOMIC_RELEASE) != res--)
60 if (__atomic_fetch_sub (&v, count + 1, __ATOMIC_ACQ_REL) != res--)
63 if (__atomic_fetch_sub (&v, 1, __ATOMIC_SEQ_CST) != res--)
72 if (__atomic_fetch_and (&v, 0, __ATOMIC_RELAXED) != init)
75 if (__atomic_fetch_and (&v, init, __ATOMIC_CONSUME) != 0)
78 if (__atomic_fetch_and (&v, 0, __ATOMIC_ACQUIRE) != 0)
82 if (__atomic_fetch_and (&v, init, __ATOMIC_RELEASE) != init)
85 if (__atomic_fetch_and (&v, 0, __ATOMIC_ACQ_REL) != init)
88 if (__atomic_fetch_and (&v, 0, __ATOMIC_SEQ_CST) != 0)
97 if (__atomic_fetch_nand (&v, 0, __ATOMIC_RELAXED) != init)
100 if (__atomic_fetch_nand (&v, init, __ATOMIC_CONSUME) != init)
103 if (__atomic_fetch_nand (&v, 0, __ATOMIC_ACQUIRE) != 0 )
106 if (__atomic_fetch_nand (&v, init, __ATOMIC_RELEASE) != init)
109 if (__atomic_fetch_nand (&v, init, __ATOMIC_ACQ_REL) != 0)
112 if (__atomic_fetch_nand (&v, 0, __ATOMIC_SEQ_CST) != init)
122 if (__atomic_fetch_xor (&v, count, __ATOMIC_RELAXED) != init)
125 if (__atomic_fetch_xor (&v, ~count, __ATOMIC_CONSUME) != init)
128 if (__atomic_fetch_xor (&v, 0, __ATOMIC_ACQUIRE) != 0)
131 if (__atomic_fetch_xor (&v, ~count, __ATOMIC_RELEASE) != 0)
134 if (__atomic_fetch_xor (&v, 0, __ATOMIC_ACQ_REL) != init)
137 if (__atomic_fetch_xor (&v, ~count, __ATOMIC_SEQ_CST) != init)
147 if (__atomic_fetch_or (&v, count, __ATOMIC_RELAXED) != 0)
151 if (__atomic_fetch_or (&v, 2, __ATOMIC_CONSUME) != 1)
155 if (__atomic_fetch_or (&v, count, __ATOMIC_ACQUIRE) != 3)
159 if (__atomic_fetch_or (&v, 8, __ATOMIC_RELEASE) != 7)
163 if (__atomic_fetch_or (&v, count, __ATOMIC_ACQ_REL) != 15)
167 if (__atomic_fetch_or (&v, count, __ATOMIC_SEQ_CST) != 31)
171 /* The OP_fetch routines return the new value after the operation. */
179 if (__atomic_add_fetch (&v, count, __ATOMIC_RELAXED) != 1)
182 if (__atomic_add_fetch (&v, 1, __ATOMIC_CONSUME) != 2)
185 if (__atomic_add_fetch (&v, count, __ATOMIC_ACQUIRE) != 3)
188 if (__atomic_add_fetch (&v, 1, __ATOMIC_RELEASE) != 4)
191 if (__atomic_add_fetch (&v, count, __ATOMIC_ACQ_REL) != 5)
194 if (__atomic_add_fetch (&v, count, __ATOMIC_SEQ_CST) != 6)
205 if (__atomic_sub_fetch (&v, count + 1, __ATOMIC_RELAXED) != --res)
208 if (__atomic_sub_fetch (&v, 1, __ATOMIC_CONSUME) != --res)
211 if (__atomic_sub_fetch (&v, count + 1, __ATOMIC_ACQUIRE) != --res)
214 if (__atomic_sub_fetch (&v, 1, __ATOMIC_RELEASE) != --res)
217 if (__atomic_sub_fetch (&v, count + 1, __ATOMIC_ACQ_REL) != --res)
220 if (__atomic_sub_fetch (&v, count + 1, __ATOMIC_SEQ_CST) != --res)
229 if (__atomic_and_fetch (&v, 0, __ATOMIC_RELAXED) != 0)
233 if (__atomic_and_fetch (&v, init, __ATOMIC_CONSUME) != init)
236 if (__atomic_and_fetch (&v, 0, __ATOMIC_ACQUIRE) != 0)
240 if (__atomic_and_fetch (&v, init, __ATOMIC_RELEASE) != init)
243 if (__atomic_and_fetch (&v, 0, __ATOMIC_ACQ_REL) != 0)
247 if (__atomic_and_fetch (&v, 0, __ATOMIC_SEQ_CST) != 0)
256 if (__atomic_nand_fetch (&v, 0, __ATOMIC_RELAXED) != init)
259 if (__atomic_nand_fetch (&v, init, __ATOMIC_CONSUME) != 0)
262 if (__atomic_nand_fetch (&v, 0, __ATOMIC_ACQUIRE) != init)
265 if (__atomic_nand_fetch (&v, init, __ATOMIC_RELEASE) != 0)
268 if (__atomic_nand_fetch (&v, init, __ATOMIC_ACQ_REL) != init)
271 if (__atomic_nand_fetch (&v, 0, __ATOMIC_SEQ_CST) != init)
283 if (__atomic_xor_fetch (&v, count, __ATOMIC_RELAXED) != init)
286 if (__atomic_xor_fetch (&v, ~count, __ATOMIC_CONSUME) != 0)
289 if (__atomic_xor_fetch (&v, 0, __ATOMIC_ACQUIRE) != 0)
292 if (__atomic_xor_fetch (&v, ~count, __ATOMIC_RELEASE) != init)
295 if (__atomic_xor_fetch (&v, 0, __ATOMIC_ACQ_REL) != init)
298 if (__atomic_xor_fetch (&v, ~count, __ATOMIC_SEQ_CST) != 0)
308 if (__atomic_or_fetch (&v, count, __ATOMIC_RELAXED) != 1)
312 if (__atomic_or_fetch (&v, 2, __ATOMIC_CONSUME) != 3)
316 if (__atomic_or_fetch (&v, count, __ATOMIC_ACQUIRE) != 7)
320 if (__atomic_or_fetch (&v, 8, __ATOMIC_RELEASE) != 15)
324 if (__atomic_or_fetch (&v, count, __ATOMIC_ACQ_REL) != 31)
328 if (__atomic_or_fetch (&v, count, __ATOMIC_SEQ_CST) != 63)
333 /* Test the OP routines with a result which isn't used. Use both variations
334 within each function. */
342 __atomic_add_fetch (&v, count, __ATOMIC_RELAXED);
346 __atomic_fetch_add (&v, count, __ATOMIC_CONSUME);
350 __atomic_add_fetch (&v, 1 , __ATOMIC_ACQUIRE);
354 __atomic_fetch_add (&v, 1, __ATOMIC_RELEASE);
358 __atomic_add_fetch (&v, count, __ATOMIC_ACQ_REL);
362 __atomic_fetch_add (&v, count, __ATOMIC_SEQ_CST);
374 __atomic_sub_fetch (&v, count + 1, __ATOMIC_RELAXED);
378 __atomic_fetch_sub (&v, count + 1, __ATOMIC_CONSUME);
382 __atomic_sub_fetch (&v, 1, __ATOMIC_ACQUIRE);
386 __atomic_fetch_sub (&v, 1, __ATOMIC_RELEASE);
390 __atomic_sub_fetch (&v, count + 1, __ATOMIC_ACQ_REL);
394 __atomic_fetch_sub (&v, count + 1, __ATOMIC_SEQ_CST);
404 __atomic_and_fetch (&v, 0, __ATOMIC_RELAXED);
409 __atomic_fetch_and (&v, init, __ATOMIC_CONSUME);
413 __atomic_and_fetch (&v, 0, __ATOMIC_ACQUIRE);
418 __atomic_fetch_and (&v, init, __ATOMIC_RELEASE);
422 __atomic_and_fetch (&v, 0, __ATOMIC_ACQ_REL);
427 __atomic_fetch_and (&v, 0, __ATOMIC_SEQ_CST);
437 __atomic_fetch_nand (&v, 0, __ATOMIC_RELAXED);
441 __atomic_fetch_nand (&v, init, __ATOMIC_CONSUME);
445 __atomic_nand_fetch (&v, 0, __ATOMIC_ACQUIRE);
449 __atomic_nand_fetch (&v, init, __ATOMIC_RELEASE);
453 __atomic_fetch_nand (&v, init, __ATOMIC_ACQ_REL);
457 __atomic_nand_fetch (&v, 0, __ATOMIC_SEQ_CST);
470 __atomic_xor_fetch (&v, count, __ATOMIC_RELAXED);
474 __atomic_fetch_xor (&v, ~count, __ATOMIC_CONSUME);
478 __atomic_xor_fetch (&v, 0, __ATOMIC_ACQUIRE);
482 __atomic_fetch_xor (&v, ~count, __ATOMIC_RELEASE);
486 __atomic_fetch_xor (&v, 0, __ATOMIC_ACQ_REL);
490 __atomic_xor_fetch (&v, ~count, __ATOMIC_SEQ_CST);
501 __atomic_or_fetch (&v, count, __ATOMIC_RELAXED);
506 __atomic_fetch_or (&v, count, __ATOMIC_CONSUME);
511 __atomic_or_fetch (&v, 4, __ATOMIC_ACQUIRE);
516 __atomic_fetch_or (&v, 8, __ATOMIC_RELEASE);
521 __atomic_or_fetch (&v, count, __ATOMIC_ACQ_REL);
526 __atomic_fetch_or (&v, count, __ATOMIC_SEQ_CST);