Error counters, the refill threshold, and the vectorized descriptor write in
the AVF plugin's input node:

#define foreach_avf_input_error \
  _(BUFFER_ALLOC, "buffer alloc error")

#define _(f,s) AVF_INPUT_ERROR_##f,

#define AVF_INPUT_REFILL_TRESHOLD 32

#ifdef CLIB_HAVE_VEC256
  /* one 256-bit store writes the whole 32-byte descriptor */
  u64x4_store_unaligned (v, (void *) d);
#endif
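For context, a minimal standalone sketch of how this X-macro list expands into
an enum plus a matching string table; the enclosing enum and array follow
VPP's conventional error-counter pattern and are assumptions about the elided
code:

#include <stdio.h>

#define foreach_avf_input_error \
  _(BUFFER_ALLOC, "buffer alloc error")

/* expand the list once as enum members ... */
typedef enum
{
#define _(f,s) AVF_INPUT_ERROR_##f,
  foreach_avf_input_error
#undef _
    AVF_INPUT_N_ERROR,
} avf_input_error_t;

/* ... and once more as the matching error strings */
static char *avf_input_error_strings[] = {
#define _(n,s) s,
  foreach_avf_input_error
#undef _
};

int
main (void)
{
  printf ("%d: %s\n", AVF_INPUT_ERROR_BUFFER_ALLOC,
          avf_input_error_strings[AVF_INPUT_ERROR_BUFFER_ALLOC]);
  return 0;
}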
In the RX queue refill path, free slots are located with power-of-two ring
arithmetic, and a failed buffer allocation bumps the BUFFER_ALLOC error
counter:

  u16 n_refill, mask, n_alloc, slot, size;
  ...
  slot = (rxq->next - n_refill - 1) & mask;
  ...
  vlib_error_count (vm, node->node_index,
                    AVF_INPUT_ERROR_BUFFER_ALLOC, 1);
  ...
  slot = (slot + 8) & mask;
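A standalone sketch of that ring-index arithmetic, assuming only that the ring
size is a power of two; the concrete values are illustrative, not taken from
the driver:

#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  uint16_t size = 1024;          /* descriptor ring size (power of two) */
  uint16_t mask = size - 1;
  uint16_t next = 5;             /* next descriptor the device will fill */
  uint16_t n_refill = 32;

  /* first slot to refill; going below zero wraps to the top of the ring */
  uint16_t slot = (uint16_t) (next - n_refill - 1) & mask;
  printf ("start slot: %u\n", slot);   /* 5 - 33 wraps to 996 */

  /* advance 8 descriptors at a time, wrapping with the same mask */
  slot = (slot + 8) & mask;
  printf ("after one batch: %u\n", slot);
  return 0;
}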
Multi-buffer packets are stitched together by following descriptors until the
end-of-packet bit appears; each tail buffer is linked to its predecessor, and
the head buffer is marked as carrying a valid total length:

  u32 tlnifb = 0, i = 0;   /* total length not incl. first buffer */
  ...
  while ((qw1 & AVF_RXD_STATUS_EOP) == 0)
    {
      ...
      b->flags |= VLIB_BUFFER_NEXT_PRESENT;
      ...
    }
  ...
  hb->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
  ...
  uword n_rx_bytes = 0;    /* per-burst byte count, accumulated later */
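A standalone sketch of how total_length_not_including_first_buffer is
accumulated over such a chain. The struct below is a hypothetical miniature of
vlib_buffer_t: the field names mirror the vlib definitions referenced later in
this page, but the flag values and the fixed pool are illustrative only.

#include <stdint.h>
#include <stdio.h>

#define MINI_BUFFER_NEXT_PRESENT       (1 << 0)   /* stand-in flag values */
#define MINI_BUFFER_TOTAL_LENGTH_VALID (1 << 1)

typedef struct
{
  uint32_t flags;
  uint16_t current_length;   /* bytes of data in this buffer */
  uint32_t next_buffer;      /* index of next buffer in the chain */
  uint32_t total_length_not_including_first_buffer;
} mini_buffer_t;

static mini_buffer_t pool[4];

int
main (void)
{
  /* build a 3-buffer chain: 0 -> 1 -> 2 */
  pool[0] = (mini_buffer_t) { .flags = MINI_BUFFER_NEXT_PRESENT,
                              .current_length = 1500, .next_buffer = 1 };
  pool[1] = (mini_buffer_t) { .flags = MINI_BUFFER_NEXT_PRESENT,
                              .current_length = 1500, .next_buffer = 2 };
  pool[2] = (mini_buffer_t) { .current_length = 100 };

  mini_buffer_t *hb = &pool[0], *b = hb;
  uint32_t tlnifb = 0;

  /* walk the tail, summing everything after the head buffer */
  while (b->flags & MINI_BUFFER_NEXT_PRESENT)
    {
      b = &pool[b->next_buffer];
      tlnifb += b->current_length;
    }

  hb->total_length_not_including_first_buffer = tlnifb;
  hb->flags |= MINI_BUFFER_TOTAL_LENGTH_VALID;
  printf ("tail bytes: %u\n", tlnifb);   /* 1600 */
  return 0;
}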
The main receive loop declares its counters and, on targets with 256-bit
vectors (CLIB_HAVE_VEC256), tests four descriptors per iteration: the four
qword[1] status words are gathered, DD and EOP must be set in every lane, and
the status words are saved for the processing pass:

  u32 n_trace, n_rx_packets = 0, n_rx_bytes = 0;
  u32 *bi, *to_next, n_left_to_next;
#ifdef CLIB_HAVE_VEC256
  u64x4 q1x4, or_q1x4 = { 0 };
#endif
  ...
  if (next + 11 < size)   /* room ahead of the ring wrap point */
    {
      ...
    }
  ...
#ifdef CLIB_HAVE_VEC256
  q1x4 = u64x4_gather ((void *) &d[0].qword[1], (void *) &d[1].qword[1],
                       (void *) &d[2].qword[1], (void *) &d[3].qword[1]);

  /* take the 4-wide path only if all four descriptors are complete
     single-buffer packets, i.e. DD and EOP are set in every lane */
  if (!u64x4_is_equal (q1x4 & dd_eop_mask4, dd_eop_mask4))
    ...

  u64x4_store_unaligned (q1x4, ptd->qw1s + n_rx_packets);
  ...
  next = (next + 4) & mask;
#endif
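A standalone emulation of that 4-wide test, using a GCC/clang vector extension
in place of VPP's u64x4 type; the bit positions are illustrative stand-ins for
AVF_RXD_STATUS_DD and AVF_RXD_STATUS_EOP:

#include <stdint.h>
#include <stdio.h>

typedef uint64_t u64x4 __attribute__ ((vector_size (32)));

#define STATUS_DD  (1ULL << 0)
#define STATUS_EOP (1ULL << 1)

int
main (void)
{
  /* four descriptor status words; the third lacks EOP (chained packet) */
  uint64_t qw1[4] = { STATUS_DD | STATUS_EOP, STATUS_DD | STATUS_EOP,
                      STATUS_DD, STATUS_DD | STATUS_EOP };

  /* "gather" the four status words into one vector */
  u64x4 q1x4 = { qw1[0], qw1[1], qw1[2], qw1[3] };
  u64x4 dd_eop_mask4 = { STATUS_DD | STATUS_EOP, STATUS_DD | STATUS_EOP,
                         STATUS_DD | STATUS_EOP, STATUS_DD | STATUS_EOP };

  u64x4 masked = q1x4 & dd_eop_mask4;
  int all_ready = 1;
  for (int i = 0; i < 4; i++)
    if (masked[i] != (STATUS_DD | STATUS_EOP))
      all_ready = 0;

  /* only when all four lanes pass may the 4-at-a-time path run;
     otherwise fall back to scalar, one-by-one handling */
  printf ("fast path: %s\n", all_ready ? "yes" : "no");   /* no */
  return 0;
}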
In the scalar path each completed descriptor yields one head buffer; when EOP
is clear, the following descriptors are collected as tail segments, and every
status word is OR-ed into or_qw1 so the whole burst can be checked for errors
with a single test:

  bi[0] = rxq->bufs[next];
  ...
  u16 tail_next = next;
  ...
  tail_next = (tail_next + 1) & mask;
  ...
  or_qw1 |= tail->qw1s[tail_desc] = td[0].qword[1];
  ...
  n_tail_desc += tail_desc;
  ...
  or_qw1 |= ptd->qw1s[n_rx_packets] = d[0].qword[1];
  ...
  next = (next + 1) & mask;
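A standalone sketch of the or_qw1 trick: OR all status words together so one
branch after the loop tells whether any descriptor in the burst carried an
error. The error bit position is an illustrative stand-in for
AVF_RXD_ERROR_IPE:

#include <stdint.h>
#include <stdio.h>

#define RXD_ERROR_IPE (1ULL << 22)   /* assumed bit position, for demo only */

int
main (void)
{
  uint64_t qw1s[4] = { 0x3, 0x3, 0x3 | RXD_ERROR_IPE, 0x3 };
  uint64_t or_qw1 = 0;

  for (int i = 0; i < 4; i++)
    or_qw1 |= qw1s[i];

  /* one branch for the whole burst instead of one per packet */
  if (or_qw1 & RXD_ERROR_IPE)
    printf ("at least one packet has an IP checksum error\n");
  return 0;
}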
After the loop, queue occupancy is updated and the vector-path status words
are folded into or_qw1; buffer handling then depends on whether the device
does DMA with virtual addresses, and packets are optionally traced before the
frame moves on:

  if (n_rx_packets == 0)
    ...
  rxq->n_enqueued -= n_rx_packets + n_tail_desc;
  ...
#ifdef CLIB_HAVE_VEC256
  or_qw1 |= or_q1x4[0] | or_q1x4[1] | or_q1x4[2] | or_q1x4[3];
#endif
  ...
  if (ad->flags & AVF_DEVICE_F_VA_DMA)
    ...
  u32 n_left = n_rx_packets, i = 0, j;
  ...
  while (n_trace && n_left)
    ...
  n_left_to_next -= n_rx_packets;
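A standalone sketch of the trace-budget bookkeeping behind that while loop:
trace at most n_trace of the received packets and hand back the unused budget.
In the driver this is done with vlib_get_trace_count(), vlib_trace_buffer() or
vlib_add_trace(), and vlib_set_trace_count(); here those calls are replaced
with printouts:

#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  uint32_t n_trace = 3;   /* budget granted by the trace subsystem */
  uint32_t n_left = 8;    /* packets received in this burst */
  uint32_t i = 0;

  while (n_trace && n_left)
    {
      printf ("tracing packet %u\n", i);   /* stand-in for add_trace */
      i++;
      n_trace--;
      n_left--;
    }
  /* return the leftover budget, as vlib_set_trace_count () would */
  printf ("budget left: %u, untraced packets: %u\n", n_trace, n_left);
  return 0;
}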
The node function skips devices that are not administratively up, and the node
registration makes the node a sibling of device-input, starting out disabled:

  if ((ad->flags & AVF_DEVICE_F_ADMIN_UP) == 0)
    continue;
  ...
  .sibling_of = "device-input",
  ...
  .state = VLIB_NODE_STATE_DISABLED,
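For context, a sketch of the full registration these fields likely belong to,
following VPP's usual input-node pattern; only .sibling_of and .state appear
in the fragments above, the remaining fields are assumptions:

VLIB_REGISTER_NODE (avf_input_node) = {
  .name = "avf-input",                      /* assumed node name */
  .sibling_of = "device-input",             /* inherit device-input's nexts */
  .format_trace = format_avf_input_trace,   /* declared in the symbol list */
  .type = VLIB_NODE_TYPE_INPUT,             /* assumed: polled input node */
  .state = VLIB_NODE_STATE_DISABLED,        /* enabled when a queue is set */
  .n_errors = AVF_INPUT_N_ERROR,            /* assumed error bookkeeping */
  .error_strings = avf_input_error_strings,
};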
Symbols referenced above (from the source cross-reference):

- static u32 vlib_get_trace_count (vlib_main_t *vm, vlib_node_runtime_t *rt)
- static void vlib_increment_combined_counter (vlib_combined_counter_main_t *cm, u32 thread_index, u32 index, u64 n_packets, u64 n_bytes): Increment a combined counter.
- vnet_main_t *vnet_get_main (void)
- vnet_interface_main_t interface_main
- static uword vlib_buffer_get_pa (vlib_main_t *vm, vlib_buffer_t *b)
- #define CLIB_MEMORY_STORE_BARRIER()
- static void vlib_error_count (vlib_main_t *vm, uword node_index, uword counter, uword increment)
- #define clib_memcpy_fast(a, b, c)
- static_always_inline void vlib_get_buffers_with_offset (vlib_main_t *vm, u32 *bi, void **b, int count, i32 offset): Translate array of buffer indices into buffer pointers with offset.
- #define VLIB_NODE_FN(node)
- #define AVF_RXD_STATUS_DD
- format_function_t format_avf_input_trace
- static void vlib_trace_buffer (vlib_main_t *vm, vlib_node_runtime_t *r, u32 next_index, vlib_buffer_t *b, int follow_chain)
- #define static_always_inline
- #define ETH_INPUT_FRAME_F_SINGLE_SW_IF_IDX
- vlib_combined_counter_main_t *combined_sw_if_counters
- #define vlib_prefetch_buffer_header(b, type): Prefetch buffer metadata.
- #define vec_elt_at_index(v, i): Get vector value at index i, checking that i is in bounds.
- #define vlib_get_new_next_frame(vm, node, next_index, vectors, n_vectors_left)
- avf_rx_tail_t tails[AVF_RX_VECTOR_SZ]
- static vlib_next_frame_t *vlib_node_runtime_get_next_frame (vlib_main_t *vm, vlib_node_runtime_t *n, u32 next_index)
- u64 qw1s[AVF_RX_MAX_DESC_IN_CHAIN - 1]
- static void vlib_buffer_free_from_ring (vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers): Free buffers from ring.
- u16 current_length: Nbytes between current data and the end of this buffer.
- u64 qw1s[AVF_RX_VECTOR_SZ]
- static_always_inline u64x4 u64x4_gather (void *p0, void *p1, void *p2, void *p3)
- #define AVF_RXD_LEN_SHIFT
- #define ETH_INPUT_FRAME_F_IP4_CKSUM_OK
- static vlib_frame_t *vlib_get_frame (vlib_main_t *vm, uword frame_index)
- u32 node_index: Node index.
- #define AVF_RXD_STATUS_EOP
- #define VLIB_REGISTER_NODE(x, ...)
- static_always_inline uword vlib_get_thread_index (void)
- #define CLIB_PREFETCH(addr, size, type)
- static_always_inline int avf_rxd_is_not_dd (avf_rx_desc_t *d)
- static_always_inline void clib_memcpy64_x4 (void *d0, void *d1, void *d2, void *d3, void *s)
- static void *vlib_frame_scalar_args (vlib_frame_t *f): Get pointer to frame scalar data.
- void vlib_put_next_frame (vlib_main_t *vm, vlib_node_runtime_t *r, u32 next_index, u32 n_vectors_left): Release pointer to next frame vector data.
- vlib_buffer_t *bufs[AVF_RX_VECTOR_SZ]
- u32 per_interface_next_index
- u32 next_buffer: Next buffer for this linked-list of buffers.
- vlib_buffer_t buffer_template
- #define AVF_RXD_ERROR_IPE
- static uword pointer_to_uword (const void *p)
- static void *vlib_add_trace (vlib_main_t *vm, vlib_node_runtime_t *r, vlib_buffer_t *b, u32 n_data_bytes)
- u32 total_length_not_including_first_buffer: Only valid for first buffer in chain.
- #define foreach_device_and_queue(var, vec)
- static u32 vlib_buffer_alloc_to_ring (vlib_main_t *vm, u32 *ring, u32 start, u32 ring_size, u32 n_buffers): Allocate buffers into ring.
- #define VLIB_BUFFER_TRACE_TRAJECTORY_INIT(b)
- avf_per_thread_data_t *per_thread_data
- static void vlib_frame_no_append (vlib_frame_t *f)
- static_always_inline int avf_rxd_is_not_eop (avf_rx_desc_t *d)
- static void vlib_set_trace_count (vlib_main_t *vm, vlib_node_runtime_t *rt, u32 count)
- static_always_inline void vlib_get_buffers (vlib_main_t *vm, u32 *bi, vlib_buffer_t **b, int count): Translate array of buffer indices into buffer pointers.
- #define CLIB_CACHE_LINE_BYTES
- u32 flags: Buffer flags (VLIB_BUFFER_FREE_LIST_INDEX_MASK: bits used to store free list index; VLIB_BUFFER_IS_TRACED: trace this buffer).
- #define AVF_RX_MAX_DESC_IN_CHAIN
- static vlib_buffer_t *vlib_get_buffer (vlib_main_t *vm, u32 buffer_index): Translate buffer index into buffer pointer.
- u32 buffers[AVF_RX_MAX_DESC_IN_CHAIN - 1]
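Taken together, the ring helpers above suggest the refill call sequence. The
following sketch is assembled only from the signatures in the list; the rxq
layout and the surrounding error handling are assumptions, not taken from the
source:

  /* try to put n_refill fresh buffers into the descriptor ring,
     starting at 'slot' and wrapping at 'size' */
  n_alloc = vlib_buffer_alloc_to_ring (vm, rxq->bufs, slot, size, n_refill);

  if (n_alloc != n_refill)   /* partial allocation: count it and undo */
    {
      vlib_error_count (vm, node->node_index,
                        AVF_INPUT_ERROR_BUFFER_ALLOC, 1);
      if (n_alloc)
        vlib_buffer_free_from_ring (vm, rxq->bufs, slot, size, n_alloc);
      return;
    }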