* in precise order.
*/
#define wmb() __asm__ __volatile__("" : : : "memory")
+#define rmb() __asm__ __volatile__("" : : : "memory")
#define mb() __asm__ __volatile__("" : : : "memory")
/* Wrapper for the last available index. Makes it easier to change. */
errx(1, "Guest moved used index from %u to %u",
last_avail, vq->vring.avail->idx);
+ /*
+ * Make sure we read the descriptor number *after* we read the ring
+ * update; don't let the CPU or compiler change the order.
+ */
+ rmb();
+
/*
* Grab the next descriptor number they're advertising, and increment
* the index we've seen.
desc = vq->vring.desc;
i = head;
+ /*
+ * We have to read the descriptor after we read the descriptor number,
+ * but there's a data dependency there so the CPU shouldn't reorder
+ * that: no rmb() required.
+ */
+
/*
* If this is an indirect entry, then this buffer contains a descriptor
* table which we handle as if it's any normal descriptor chain.