/* SPDX-License-Identifier: GPL-2.0 */
/* Interface for implementing AF_XDP zero-copy support in drivers.
 * Copyright(c) 2020 Intel Corporation.
 */

#ifndef _LINUX_XDP_SOCK_DRV_H
#define _LINUX_XDP_SOCK_DRV_H
#include <net/xdp_sock.h>
#include <net/xsk_buff_pool.h>

#define XDP_UMEM_MIN_CHUNK_SHIFT 11
#define XDP_UMEM_MIN_CHUNK_SIZE (1 << XDP_UMEM_MIN_CHUNK_SHIFT)

#ifdef CONFIG_XDP_SOCKETS
void xsk_tx_completed(struct xsk_buff_pool *pool, u32 nb_entries);
bool xsk_tx_peek_desc(struct xsk_buff_pool *pool, struct xdp_desc *desc);
u32 xsk_tx_peek_release_desc_batch(struct xsk_buff_pool *pool, u32 max);
void xsk_tx_release(struct xsk_buff_pool *pool);
struct xsk_buff_pool *xsk_get_pool_from_qid(struct net_device *dev,
					    u16 queue_id);
void xsk_set_rx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_set_tx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_clear_rx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_clear_tx_need_wakeup(struct xsk_buff_pool *pool);
bool xsk_uses_need_wakeup(struct xsk_buff_pool *pool);
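
/* A hedged usage sketch (not part of the upstream header): a driver's
 * NAPI RX poll might drive the need_wakeup protocol roughly like this,
 * setting the flag when the fill ring ran dry so user space knows it
 * must kick the kernel. The my_*() names and ring layout are
 * hypothetical.
 *
 *	if (xsk_uses_need_wakeup(ring->xsk_pool)) {
 *		if (my_fill_ring_ran_dry(ring))
 *			xsk_set_rx_need_wakeup(ring->xsk_pool);
 *		else
 *			xsk_clear_rx_need_wakeup(ring->xsk_pool);
 *	}
 */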
static inline u32 xsk_pool_get_headroom(struct xsk_buff_pool *pool)
{
	return XDP_PACKET_HEADROOM + pool->headroom;
}

static inline u32 xsk_pool_get_chunk_size(struct xsk_buff_pool *pool)
{
	return pool->chunk_size;
}

static inline u32 xsk_pool_get_rx_frame_size(struct xsk_buff_pool *pool)
{
	return xsk_pool_get_chunk_size(pool) - xsk_pool_get_headroom(pool);
}
static inline void xsk_pool_set_rxq_info(struct xsk_buff_pool *pool,
					 struct xdp_rxq_info *rxq)
{
	xp_set_rxq_info(pool, rxq);
}
static inline unsigned int xsk_pool_get_napi_id(struct xsk_buff_pool *pool)
{
#ifdef CONFIG_NET_RX_BUSY_POLL
	return pool->heads[0].xdp.rxq->napi_id;
#else
	return 0;
#endif
}
static inline void xsk_pool_dma_unmap(struct xsk_buff_pool *pool,
				      unsigned long attrs)
{
	xp_dma_unmap(pool, attrs);
}
static inline int xsk_pool_dma_map(struct xsk_buff_pool *pool,
				   struct device *dev, unsigned long attrs)
{
	struct xdp_umem *umem = pool->umem;

	return xp_dma_map(pool, dev, attrs, umem->pgs, umem->npgs);
}
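
/* A hedged setup sketch (not part of the upstream header): when enabling
 * zero-copy on a queue, a driver typically looks up the pool by queue id,
 * DMA maps it, and registers its rxq info. my_dev, dma_dev and the attrs
 * value of 0 are hypothetical; drivers pick their own DMA attributes.
 *
 *	pool = xsk_get_pool_from_qid(my_dev->netdev, qid);
 *	if (!pool)
 *		return -EINVAL;
 *	err = xsk_pool_dma_map(pool, my_dev->dma_dev, 0);
 *	if (err)
 *		return err;
 *	xsk_pool_set_rxq_info(pool, &my_dev->rx_ring[qid].xdp_rxq);
 */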
static inline dma_addr_t xsk_buff_xdp_get_dma(struct xdp_buff *xdp)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

	return xp_get_dma(xskb);
}

static inline dma_addr_t xsk_buff_xdp_get_frame_dma(struct xdp_buff *xdp)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

	return xp_get_frame_dma(xskb);
}
static inline struct xdp_buff *xsk_buff_alloc(struct xsk_buff_pool *pool)
{
	return xp_alloc(pool);
}

static inline bool xsk_is_eop_desc(struct xdp_desc *desc)
{
	return !xp_mb_desc(desc);
}
/* Returns as many entries as possible up to max. 0 <= N <= max. */
static inline u32 xsk_buff_alloc_batch(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max)
{
	return xp_alloc_batch(pool, xdp, max);
}

static inline bool xsk_buff_can_alloc(struct xsk_buff_pool *pool, u32 count)
{
	return xp_can_alloc(pool, count);
}
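
/* A hedged RX fill sketch (not part of the upstream header): a driver can
 * bulk-allocate buffers and post their DMA addresses to its hardware
 * ring. ring->xdp_batch and my_post_rx_desc() are hypothetical.
 *
 *	n = xsk_buff_alloc_batch(ring->xsk_pool, ring->xdp_batch, count);
 *	for (i = 0; i < n; i++)
 *		my_post_rx_desc(ring,
 *				xsk_buff_xdp_get_dma(ring->xdp_batch[i]));
 */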
static inline void xsk_buff_free(struct xdp_buff *xdp)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);
	struct list_head *xskb_list = &xskb->pool->xskb_list;
	struct xdp_buff_xsk *pos, *tmp;

	if (likely(!xdp_buff_has_frags(xdp)))
		goto out;

	list_for_each_entry_safe(pos, tmp, xskb_list, xskb_list_node) {
		list_del(&pos->xskb_list_node);
		xp_free(pos);
	}

	xdp_get_shared_info_from_buff(xdp)->nr_frags = 0;
out:
	xp_free(xskb);
}
static inline void xsk_buff_add_frag(struct xdp_buff *xdp)
{
	struct xdp_buff_xsk *frag = container_of(xdp, struct xdp_buff_xsk, xdp);

	list_add_tail(&frag->xskb_list_node, &frag->pool->xskb_list);
}
static inline struct xdp_buff *xsk_buff_get_frag(struct xdp_buff *first)
{
	struct xdp_buff_xsk *xskb = container_of(first, struct xdp_buff_xsk, xdp);
	struct xdp_buff *ret = NULL;
	struct xdp_buff_xsk *frag;

	frag = list_first_entry_or_null(&xskb->pool->xskb_list,
					struct xdp_buff_xsk, xskb_list_node);
	if (frag) {
		list_del(&frag->xskb_list_node);
		ret = &frag->xdp;
	}

	return ret;
}
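
/* A hedged multi-buffer sketch (not part of the upstream header): on RX a
 * driver links non-EOP buffers with xsk_buff_add_frag(), and a consumer
 * later pops them off the first buffer one by one. my_attach_frag() is
 * hypothetical.
 *
 *	u32 i, nr_frags = xdp_get_shared_info_from_buff(first)->nr_frags;
 *
 *	for (i = 0; i < nr_frags; i++) {
 *		struct xdp_buff *frag = xsk_buff_get_frag(first);
 *
 *		if (!frag)
 *			break;
 *		my_attach_frag(skb, frag);
 *	}
 */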
static inline void xsk_buff_set_size(struct xdp_buff *xdp, u32 size)
{
	xdp->data = xdp->data_hard_start + XDP_PACKET_HEADROOM;
	xdp->data_meta = xdp->data;
	xdp->data_end = xdp->data + size;
}
static inline dma_addr_t xsk_buff_raw_get_dma(struct xsk_buff_pool *pool,
					      u64 addr)
{
	return xp_raw_get_dma(pool, addr);
}

static inline void *xsk_buff_raw_get_data(struct xsk_buff_pool *pool, u64 addr)
{
	return xp_raw_get_data(pool, addr);
}
static inline void xsk_buff_dma_sync_for_cpu(struct xdp_buff *xdp, struct xsk_buff_pool *pool)
{
	struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

	if (!pool->dma_need_sync)
		return;

	xp_dma_sync_for_cpu(xskb);
}
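
/* A hedged RX completion sketch (not part of the upstream header): once
 * hardware has written a frame, the driver sets the buffer size, syncs
 * the buffer for the CPU, and runs its XDP program. my_run_xdp() is
 * hypothetical.
 *
 *	xsk_buff_set_size(xdp, len);
 *	xsk_buff_dma_sync_for_cpu(xdp, ring->xsk_pool);
 *	if (my_run_xdp(ring, xdp) == XDP_DROP)
 *		xsk_buff_free(xdp);
 */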
static inline void xsk_buff_raw_dma_sync_for_device(struct xsk_buff_pool *pool,
						    dma_addr_t dma,
						    size_t size)
{
	xp_dma_sync_for_device(pool, dma, size);
}
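
/* A hedged zero-copy TX sketch (not part of the upstream header): the
 * driver peeks descriptors from the socket's TX ring, resolves and syncs
 * their DMA addresses, posts them to hardware, and, once hardware has
 * completed them, reports the count back. The my_*() names are
 * hypothetical.
 *
 *	struct xdp_desc desc;
 *
 *	while (my_tx_ring_has_room(ring) &&
 *	       xsk_tx_peek_desc(ring->xsk_pool, &desc)) {
 *		dma_addr_t dma = xsk_buff_raw_get_dma(ring->xsk_pool,
 *						      desc.addr);
 *
 *		xsk_buff_raw_dma_sync_for_device(ring->xsk_pool, dma,
 *						 desc.len);
 *		my_post_tx_desc(ring, dma, desc.len);
 *	}
 *	xsk_tx_release(ring->xsk_pool);
 *
 * and later, from the TX cleanup path:
 *
 *	xsk_tx_completed(ring->xsk_pool, done);
 */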
#else

static inline void xsk_tx_completed(struct xsk_buff_pool *pool, u32 nb_entries)
{
}

static inline bool xsk_tx_peek_desc(struct xsk_buff_pool *pool,
				    struct xdp_desc *desc)
{
	return false;
}

static inline u32 xsk_tx_peek_release_desc_batch(struct xsk_buff_pool *pool, u32 max)
{
	return 0;
}

static inline void xsk_tx_release(struct xsk_buff_pool *pool)
{
}

static inline struct xsk_buff_pool *
xsk_get_pool_from_qid(struct net_device *dev, u16 queue_id)
{
	return NULL;
}

static inline void xsk_set_rx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline void xsk_set_tx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline void xsk_clear_rx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline void xsk_clear_tx_need_wakeup(struct xsk_buff_pool *pool)
{
}

static inline bool xsk_uses_need_wakeup(struct xsk_buff_pool *pool)
{
	return false;
}

static inline u32 xsk_pool_get_headroom(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline u32 xsk_pool_get_chunk_size(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline u32 xsk_pool_get_rx_frame_size(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline void xsk_pool_set_rxq_info(struct xsk_buff_pool *pool,
					 struct xdp_rxq_info *rxq)
{
}

static inline unsigned int xsk_pool_get_napi_id(struct xsk_buff_pool *pool)
{
	return 0;
}

static inline void xsk_pool_dma_unmap(struct xsk_buff_pool *pool,
				      unsigned long attrs)
{
}

static inline int xsk_pool_dma_map(struct xsk_buff_pool *pool,
				   struct device *dev, unsigned long attrs)
{
	return 0;
}

static inline dma_addr_t xsk_buff_xdp_get_dma(struct xdp_buff *xdp)
{
	return 0;
}

static inline dma_addr_t xsk_buff_xdp_get_frame_dma(struct xdp_buff *xdp)
{
	return 0;
}

static inline struct xdp_buff *xsk_buff_alloc(struct xsk_buff_pool *pool)
{
	return NULL;
}

static inline bool xsk_is_eop_desc(struct xdp_desc *desc)
{
	return false;
}

static inline u32 xsk_buff_alloc_batch(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max)
{
	return 0;
}

static inline bool xsk_buff_can_alloc(struct xsk_buff_pool *pool, u32 count)
{
	return false;
}

static inline void xsk_buff_free(struct xdp_buff *xdp)
{
}

static inline void xsk_buff_add_frag(struct xdp_buff *xdp)
{
}

static inline struct xdp_buff *xsk_buff_get_frag(struct xdp_buff *first)
{
	return NULL;
}

static inline void xsk_buff_set_size(struct xdp_buff *xdp, u32 size)
{
}

static inline dma_addr_t xsk_buff_raw_get_dma(struct xsk_buff_pool *pool,
					      u64 addr)
{
	return 0;
}

static inline void *xsk_buff_raw_get_data(struct xsk_buff_pool *pool, u64 addr)
{
	return NULL;
}

static inline void xsk_buff_dma_sync_for_cpu(struct xdp_buff *xdp, struct xsk_buff_pool *pool)
{
}

static inline void xsk_buff_raw_dma_sync_for_device(struct xsk_buff_pool *pool,
						    dma_addr_t dma,
						    size_t size)
{
}
#endif /* CONFIG_XDP_SOCKETS */

#endif /* _LINUX_XDP_SOCK_DRV_H */