+/* Expand a membar instruction for various use cases. Both the LOAD_STORE
+ and BEFORE_AFTER arguments are of the form X_Y. They are two-bit masks
+ where bit 0 indicates that X is true, and bit 1 indicates Y is true. */
+
+void
+sparc_emit_membar_for_model (enum memmodel model,
+                             int load_store, int before_after)
+{
+  /* Bits for the MEMBAR mmask field.  */
+  const int LoadLoad = 1;
+  const int StoreLoad = 2;
+  const int LoadStore = 4;
+  const int StoreStore = 8;
+
+  /* MM collects the ordering bits MODEL requires; IMPLIED collects the
+     bits the hardware memory model already guarantees, which need not be
+     emitted.  */
+  int mm = 0, implied = 0;
+
+  switch (sparc_memory_model)
+    {
+    case SMM_SC:
+      /* Sequential Consistency.  All memory transactions are immediately
+         visible in sequential execution order.  No barriers needed.  */
+      implied = LoadLoad | StoreLoad | LoadStore | StoreStore;
+      break;
+
+    case SMM_TSO:
+      /* Total Store Ordering:  all memory transactions with store semantics
+         are followed by an implied StoreStore.  */
+      implied |= StoreStore;
+
+      /* If we're not looking for a raw barrier (before+after), then atomic
+         operations get the benefit of being both load and store.  */
+      if (load_store == 3 && before_after == 1)
+        implied |= StoreLoad;
+      /* FALLTHRU */
+
+    case SMM_PSO:
+      /* Partial Store Ordering:  all memory transactions with load semantics
+         are followed by an implied LoadLoad | LoadStore.  */
+      implied |= LoadLoad | LoadStore;
+
+      /* If we're not looking for a raw barrier (before+after), then atomic
+         operations get the benefit of being both load and store.  */
+      if (load_store == 3 && before_after == 2)
+        implied |= StoreLoad | StoreStore;
+      /* FALLTHRU */
+
+    case SMM_RMO:
+      /* Relaxed Memory Ordering: no implicit bits.  */
+      break;
+
+    default:
+      gcc_unreachable ();
+    }
+
+  /* A barrier before the access implements release semantics: earlier
+     loads and stores must be ordered before the access itself.  */
+  if (before_after & 1)
+    {
+      if (model == MEMMODEL_RELEASE
+          || model == MEMMODEL_ACQ_REL
+          || model == MEMMODEL_SEQ_CST)
+        {
+          if (load_store & 1)
+            mm |= LoadLoad | StoreLoad;
+          if (load_store & 2)
+            mm |= LoadStore | StoreStore;
+        }
+    }
+
+  /* A barrier after the access implements acquire semantics: the access
+     itself must be ordered before subsequent loads and stores.  */
+  if (before_after & 2)
+    {
+      if (model == MEMMODEL_ACQUIRE
+          || model == MEMMODEL_ACQ_REL
+          || model == MEMMODEL_SEQ_CST)
+        {
+          if (load_store & 1)
+            mm |= LoadLoad | LoadStore;
+          if (load_store & 2)
+            mm |= StoreLoad | StoreStore;
+        }
+    }
+
+  /* Remove the bits implied by the system memory model.  */
+  mm &= ~implied;
+
+  /* For raw barriers (before+after), always emit a barrier.  This will
+     become a compile-time barrier if needed.  */
+  if (mm || before_after == 3)
+    emit_insn (gen_membar (GEN_INT (mm)));
+}
+