-	if (caa_unlikely(v_cmpxchg(config, &ctx->buf->offset, o_old, o_end)
-			!= o_old))
-		goto slow_path;
-
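+	/*
+	 * Fall back to the atomic cmpxchg path when the client requires
+	 * global synchronization, when rseq is unavailable for this
+	 * thread (negative cpu_id), or while another thread holds the
+	 * reserve fallback reference.
+	 */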
+	if (caa_unlikely(config->sync == RING_BUFFER_SYNC_GLOBAL
+			|| rseq_state.cpu_id < 0
+			|| uatomic_read(&chan->u.reserve_fallback_ref))) {
+		if (caa_unlikely(v_cmpxchg(config, &ctx->buf->offset, o_old,
+				o_end) != o_old))
+			goto slow_path;
+	} else {
+		/*
+		 * Order the load of reserve_fallback_ref (in the condition
+		 * above) before the load of offset (below). Pairs with the
+		 * implicit full memory barrier of v_cmpxchg on the fallback
+		 * path.
+		 */
+		cmm_smp_rmb();
+		if (caa_unlikely(ctx->buf->offset.a != o_old))
+			return -EAGAIN;
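+		/*
+		 * Commit the reservation as the single final store of the
+		 * rseq critical section, publishing o_end to the offset
+		 * word. __rseq_finish() returns false when the sequence is
+		 * aborted by preemption, migration or signal delivery.
+		 */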
+		if (caa_unlikely(!__rseq_finish(NULL, 0, NULL, NULL, 0,
+				(intptr_t *) &ctx->buf->offset.a,
+				(intptr_t) o_end,
+				rseq_state, RSEQ_FINISH_SINGLE, false)))
+			return -EAGAIN;
+	}
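
To see the barrier pairing in isolation, the sketch below models both paths with C11 atomics. It is illustrative only, not the patch's code: offset, reserve_fallback_ref, reserve_fast() and reserve_fallback() are stand-in names, and __rseq_finish() is replaced by a plain compare-and-swap so the model builds without rseq support (a real rseq commit is a plain store, which is exactly why the ordering below matters).

/* C11 model of the fast/fallback reservation ordering (sketch only). */
#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

static atomic_long offset;              /* models ctx->buf->offset */
static atomic_int reserve_fallback_ref; /* models chan->u.reserve_fallback_ref */

/* Fallback path: the CAS implies a full barrier that the fast path's
 * read barrier pairs with. */
static bool reserve_fallback(long o_old, long o_end)
{
	bool ok;

	atomic_fetch_add(&reserve_fallback_ref, 1);
	ok = atomic_compare_exchange_strong(&offset, &o_old, o_end);
	atomic_fetch_sub(&reserve_fallback_ref, 1);
	return ok;
}

/* Fast path: load the fallback ref, fence, then re-check offset
 * before committing. */
static int reserve_fast(long o_old, long o_end)
{
	if (atomic_load_explicit(&reserve_fallback_ref,
			memory_order_relaxed))
		return -1;	/* defer to reserve_fallback() */
	/* Models cmm_smp_rmb(): orders the ref load before the
	 * offset load. */
	atomic_thread_fence(memory_order_acquire);
	if (atomic_load_explicit(&offset, memory_order_relaxed) != o_old)
		return -1;	/* -EAGAIN in the patch */
	/* Stand-in for __rseq_finish(): the patch uses a single
	 * rseq-protected store here, not a CAS. */
	if (!atomic_compare_exchange_strong(&offset, &o_old, o_end))
		return -1;
	return 0;
}

int main(void)
{
	int ret;
	bool ok;

	ret = reserve_fast(0, 64);
	printf("fast path: %d (offset now %ld)\n", ret,
		atomic_load(&offset));
	ok = reserve_fallback(64, 128);
	printf("fallback: %s (offset now %ld)\n", ok ? "ok" : "failed",
		atomic_load(&offset));
	return 0;
}

Under these assumptions, the fast path refuses to race with an in-flight fallback reservation rather than trying to win the race, which is what lets the rseq commit stay a single word-sized store.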