io_uring/zcrx: move io_zcrx_iov_page
We'll need io_zcrx_iov_page at the top to keep offset calculations
closer together, move it there.

Reviewed-by: David Wei <dw@davidwei.uk>
Signed-off-by: Pavel Begunkov <asml.silence@gmail.com>
Link: https://lore.kernel.org/r/575617033a8b84a5985c7eb760b7121efdbe7e56.1745141261.git.asml.silence@gmail.com
Signed-off-by: Jens Axboe <axboe@kernel.dk>
parent 37d26edd6b
commit a79154ae5d
io_uring/zcrx.c
@@ -31,6 +31,20 @@ static inline struct io_zcrx_ifq *io_pp_to_ifq(struct page_pool *pp)
 	return pp->mp_priv;
 }
 
+static inline struct io_zcrx_area *io_zcrx_iov_to_area(const struct net_iov *niov)
+{
+	struct net_iov_area *owner = net_iov_owner(niov);
+
+	return container_of(owner, struct io_zcrx_area, nia);
+}
+
+static inline struct page *io_zcrx_iov_page(const struct net_iov *niov)
+{
+	struct io_zcrx_area *area = io_zcrx_iov_to_area(niov);
+
+	return area->pages[net_iov_idx(niov)];
+}
+
 #define IO_DMA_ATTR (DMA_ATTR_SKIP_CPU_SYNC | DMA_ATTR_WEAK_ORDERING)
 
 static void __io_zcrx_unmap_area(struct io_zcrx_ifq *ifq,
@@ -118,13 +132,6 @@ struct io_zcrx_args {
 
 static const struct memory_provider_ops io_uring_pp_zc_ops;
 
-static inline struct io_zcrx_area *io_zcrx_iov_to_area(const struct net_iov *niov)
-{
-	struct net_iov_area *owner = net_iov_owner(niov);
-
-	return container_of(owner, struct io_zcrx_area, nia);
-}
-
 static inline atomic_t *io_get_user_counter(struct net_iov *niov)
 {
 	struct io_zcrx_area *area = io_zcrx_iov_to_area(niov);
@@ -147,13 +154,6 @@ static void io_zcrx_get_niov_uref(struct net_iov *niov)
 	atomic_inc(io_get_user_counter(niov));
 }
 
-static inline struct page *io_zcrx_iov_page(const struct net_iov *niov)
-{
-	struct io_zcrx_area *area = io_zcrx_iov_to_area(niov);
-
-	return area->pages[net_iov_idx(niov)];
-}
-
 static int io_allocate_rbuf_ring(struct io_zcrx_ifq *ifq,
 				 struct io_uring_zcrx_ifq_reg *reg,
 				 struct io_uring_region_desc *rd)
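
For context, a minimal sketch of the kind of offset calculation the commit message alludes to. This is not part of the patch, and the helper name io_zcrx_iov_offset is made up here: it only illustrates how code placed after the move can resolve a net_iov plus an in-iov offset to its backing page using io_zcrx_iov_page().

/*
 * Hypothetical illustration only, not part of this commit: resolve a
 * net_iov and an offset inside it to the backing page and the byte
 * offset within that page, using the helpers moved above.
 */
static inline struct page *io_zcrx_iov_offset(const struct net_iov *niov,
					      unsigned int off,
					      unsigned int *page_off)
{
	/* each net_iov in the area is backed by one entry in area->pages[] */
	struct page *page = io_zcrx_iov_page(niov);

	/* an offset within a net_iov stays within its single backing page */
	*page_off = off & (PAGE_SIZE - 1);
	return page;
}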