	atomic_inc(io_get_user_counter(niov));
}
+static void io_fill_zcrx_offsets(struct io_uring_zcrx_offsets *offsets)
+{
+	offsets->head = offsetof(struct io_uring, head);
+	offsets->tail = offsetof(struct io_uring, tail);
+	offsets->rqes = ALIGN(sizeof(struct io_uring), L1_CACHE_BYTES);
+}
+
static int io_allocate_rbuf_ring(struct io_ring_ctx *ctx,
				 struct io_zcrx_ifq *ifq,
				 struct io_uring_zcrx_ifq_reg *reg,
	void *ptr;
	int ret;
-	off = ALIGN(sizeof(struct io_uring), L1_CACHE_BYTES);
+	io_fill_zcrx_offsets(&reg->offsets);
+	off = reg->offsets.rqes;
	size = off + sizeof(struct io_uring_zcrx_rqe) * reg->rq_entries;
	if (size > rd->size)
		return -EINVAL;
	ifq->rq_ring = (struct io_uring *)ptr;
	ifq->rqes = (struct io_uring_zcrx_rqe *)(ptr + off);
-	reg->offsets.head = offsetof(struct io_uring, head);
-	reg->offsets.tail = offsetof(struct io_uring, tail);
-	reg->offsets.rqes = off;
	return 0;
}
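
For context, the offsets that io_fill_zcrx_offsets() writes back through reg->offsets are what userspace uses to locate the refill ring inside the mapped region: head and tail land inside the struct io_uring placed at the region base, and rqes points at the cache-line-aligned rqe array that follows it. The sketch below is illustrative only and not part of the patch; the struct and function names (zcrx_rq_ring, zcrx_rq_ring_init) are hypothetical, and the mmap of the region is assumed to have already been done.

/*
 * Hypothetical userspace-side sketch, not from this patch: derive ring
 * pointers from the offsets reported at registration time. Assumes
 * 'base' is the start of the mapped refill-ring region.
 */
#include <stdint.h>
#include <stddef.h>

struct zcrx_rq_ring {
	uint32_t *khead;	/* kernel-updated head index */
	uint32_t *ktail;	/* userspace-updated tail index */
	void *rqes;		/* start of the io_uring_zcrx_rqe array */
};

static inline void zcrx_rq_ring_init(struct zcrx_rq_ring *rq, void *base,
				      uint64_t head_off, uint64_t tail_off,
				      uint64_t rqes_off)
{
	/* All three offsets in this patch are relative to the region base. */
	rq->khead = (uint32_t *)((char *)base + head_off);
	rq->ktail = (uint32_t *)((char *)base + tail_off);
	rq->rqes = (char *)base + rqes_off;
}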