Lines matching refs: rx_ring

505 static void ixgbevf_process_skb_fields(struct ixgbevf_ring *rx_ring,  in ixgbevf_process_skb_fields()  argument
509 ixgbevf_rx_checksum(rx_ring, rx_desc, skb); in ixgbevf_process_skb_fields()
513 unsigned long *active_vlans = netdev_priv(rx_ring->netdev); in ixgbevf_process_skb_fields()
519 skb->protocol = eth_type_trans(skb, rx_ring->netdev); in ixgbevf_process_skb_fields()
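
The three matches above (505-519) are the tail of the Rx path that fills in per-packet metadata: the checksum verdict, a hardware-stripped VLAN tag, and finally skb->protocol. A minimal sketch of that pattern, assuming kernel context; the csum_ok/vlan_present flags stand in for the descriptor status bits the driver actually tests, and the helper name is illustrative:

static void fill_rx_skb_fields(struct sk_buff *skb, struct net_device *netdev,
                               bool csum_ok, bool vlan_present, u16 vid)
{
        /* checksum verdict taken from the descriptor (ixgbevf_rx_checksum) */
        if (csum_ok)
                skb->ip_summed = CHECKSUM_UNNECESSARY;

        /* re-insert a VLAN tag the hardware stripped, as around line 513 */
        if (vlan_present)
                __vlan_hwaccel_put_tag(skb, htons(ETH_P_8021Q), vid);

        /* set the protocol via eth_type_trans(), as at line 519 */
        skb->protocol = eth_type_trans(skb, netdev);
}
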
533 static bool ixgbevf_is_non_eop(struct ixgbevf_ring *rx_ring, in ixgbevf_is_non_eop() argument
536 u32 ntc = rx_ring->next_to_clean + 1; in ixgbevf_is_non_eop()
539 ntc = (ntc < rx_ring->count) ? ntc : 0; in ixgbevf_is_non_eop()
540 rx_ring->next_to_clean = ntc; in ixgbevf_is_non_eop()
542 prefetch(IXGBEVF_RX_DESC(rx_ring, ntc)); in ixgbevf_is_non_eop()
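
Lines 536-542 show the standard descriptor-ring index advance: next_to_clean moves forward, wraps to 0 at rx_ring->count, and the descriptor at the new index is prefetched before the end-of-packet bit is tested. The wrap itself, as a self-contained sketch:

/* Advance a ring index with wrap-around, as done for next_to_clean above. */
static unsigned int ring_index_next(unsigned int idx, unsigned int count)
{
        unsigned int ntc = idx + 1;

        return (ntc < count) ? ntc : 0;  /* wrap to slot 0 past the last descriptor */
}
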
550 static bool ixgbevf_alloc_mapped_page(struct ixgbevf_ring *rx_ring, in ixgbevf_alloc_mapped_page() argument
563 rx_ring->rx_stats.alloc_rx_page_failed++; in ixgbevf_alloc_mapped_page()
568 dma = dma_map_page(rx_ring->dev, page, 0, in ixgbevf_alloc_mapped_page()
574 if (dma_mapping_error(rx_ring->dev, dma)) { in ixgbevf_alloc_mapped_page()
577 rx_ring->rx_stats.alloc_rx_buff_failed++; in ixgbevf_alloc_mapped_page()
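
The matches at 550-577 are the allocate-and-map half of the page-recycling scheme: a fresh page is DMA-mapped for device writes, and each failure mode bumps its own counter (alloc_rx_page_failed vs. alloc_rx_buff_failed). A hedged sketch of that pattern; the helper name and the return-value convention are illustrative, only the dma_* and page calls are the real API:

static bool rx_alloc_mapped_page(struct device *dev, struct page **pagep,
                                 dma_addr_t *dma_out)
{
        struct page *page = alloc_page(GFP_ATOMIC);
        dma_addr_t dma;

        if (unlikely(!page))
                return false;                   /* page allocation failed */

        /* map the whole page so the device can write received frames into it */
        dma = dma_map_page(dev, page, 0, PAGE_SIZE, DMA_FROM_DEVICE);
        if (dma_mapping_error(dev, dma)) {
                __free_page(page);              /* mapping failed, release the page */
                return false;
        }

        *pagep = page;
        *dma_out = dma;
        return true;
}
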
593 static void ixgbevf_alloc_rx_buffers(struct ixgbevf_ring *rx_ring, in ixgbevf_alloc_rx_buffers() argument
598 unsigned int i = rx_ring->next_to_use; in ixgbevf_alloc_rx_buffers()
601 if (!cleaned_count || !rx_ring->netdev) in ixgbevf_alloc_rx_buffers()
604 rx_desc = IXGBEVF_RX_DESC(rx_ring, i); in ixgbevf_alloc_rx_buffers()
605 bi = &rx_ring->rx_buffer_info[i]; in ixgbevf_alloc_rx_buffers()
606 i -= rx_ring->count; in ixgbevf_alloc_rx_buffers()
609 if (!ixgbevf_alloc_mapped_page(rx_ring, bi)) in ixgbevf_alloc_rx_buffers()
621 rx_desc = IXGBEVF_RX_DESC(rx_ring, 0); in ixgbevf_alloc_rx_buffers()
622 bi = rx_ring->rx_buffer_info; in ixgbevf_alloc_rx_buffers()
623 i -= rx_ring->count; in ixgbevf_alloc_rx_buffers()
632 i += rx_ring->count; in ixgbevf_alloc_rx_buffers()
634 if (rx_ring->next_to_use != i) { in ixgbevf_alloc_rx_buffers()
636 rx_ring->next_to_use = i; in ixgbevf_alloc_rx_buffers()
639 rx_ring->next_to_alloc = i; in ixgbevf_alloc_rx_buffers()
647 ixgbevf_write_tail(rx_ring, i); in ixgbevf_alloc_rx_buffers()
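
Lines 598-647 are the refill loop. Two details are easy to miss in the flat listing: the running index is biased by -count so the wrap test is a plain !i check (lines 606, 623, 632), and the tail register is only written, after a write barrier, when next_to_use actually moved (lines 634-647). The index bookkeeping alone, as a sketch; descriptor and DMA writes are elided, and the caller guarantees to_fill is non-zero, as line 601 does:

static unsigned int refill_ring_indices(unsigned int next_to_use,
                                        unsigned int count,
                                        unsigned int to_fill)
{
        unsigned int i = next_to_use;

        i -= count;                     /* bias by -count: wrap check becomes !i */
        do {
                /* allocate/map a page and write pkt_addr into the descriptor here */
                i++;
                if (unlikely(!i))       /* stepped past the last descriptor */
                        i -= count;     /* restart the bias at slot 0 */
        } while (--to_fill);

        return i + count;               /* undo the bias: the new next_to_use */
}
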
663 static void ixgbevf_pull_tail(struct ixgbevf_ring *rx_ring, in ixgbevf_pull_tail() argument
709 static bool ixgbevf_cleanup_headers(struct ixgbevf_ring *rx_ring, in ixgbevf_cleanup_headers() argument
716 struct net_device *netdev = rx_ring->netdev; in ixgbevf_cleanup_headers()
726 ixgbevf_pull_tail(rx_ring, skb); in ixgbevf_cleanup_headers()
742 static void ixgbevf_reuse_rx_page(struct ixgbevf_ring *rx_ring, in ixgbevf_reuse_rx_page() argument
746 u16 nta = rx_ring->next_to_alloc; in ixgbevf_reuse_rx_page()
748 new_buff = &rx_ring->rx_buffer_info[nta]; in ixgbevf_reuse_rx_page()
752 rx_ring->next_to_alloc = (nta < rx_ring->count) ? nta : 0; in ixgbevf_reuse_rx_page()
760 dma_sync_single_range_for_device(rx_ring->dev, new_buff->dma, in ixgbevf_reuse_rx_page()
786 static bool ixgbevf_add_rx_frag(struct ixgbevf_ring *rx_ring, in ixgbevf_add_rx_frag() argument
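
Lines 742-786 are the recycle half of the scheme: when ixgbevf_add_rx_frag() reports that the page can still be reused, the buffer is parked in the next_to_alloc slot and the region the hardware will write next is synced back to the device. A hedged sketch; struct rx_buf and the PAGE_SIZE / 2 buffer size are stand-ins for the driver's ixgbevf_rx_buffer and its buffer-size constant:

struct rx_buf {
        dma_addr_t dma;
        struct page *page;
        unsigned int page_offset;
};

static unsigned int reuse_rx_page(struct device *dev, struct rx_buf *buf_ring,
                                  unsigned int next_to_alloc, unsigned int count,
                                  const struct rx_buf *old)
{
        struct rx_buf *new_buf = &buf_ring[next_to_alloc];
        unsigned int nta = next_to_alloc + 1;

        *new_buf = *old;                        /* hand the mapped page over */

        /* wrap next_to_alloc, as at line 752 */
        nta = (nta < count) ? nta : 0;

        /* give the region the device will use next back to it (line 760) */
        dma_sync_single_range_for_device(dev, new_buf->dma,
                                         new_buf->page_offset, PAGE_SIZE / 2,
                                         DMA_FROM_DEVICE);
        return nta;
}
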
844 static struct sk_buff *ixgbevf_fetch_rx_buffer(struct ixgbevf_ring *rx_ring, in ixgbevf_fetch_rx_buffer() argument
851 rx_buffer = &rx_ring->rx_buffer_info[rx_ring->next_to_clean]; in ixgbevf_fetch_rx_buffer()
866 skb = netdev_alloc_skb_ip_align(rx_ring->netdev, in ixgbevf_fetch_rx_buffer()
869 rx_ring->rx_stats.alloc_rx_buff_failed++; in ixgbevf_fetch_rx_buffer()
881 dma_sync_single_range_for_cpu(rx_ring->dev, in ixgbevf_fetch_rx_buffer()
888 if (ixgbevf_add_rx_frag(rx_ring, rx_buffer, rx_desc, skb)) { in ixgbevf_fetch_rx_buffer()
890 ixgbevf_reuse_rx_page(rx_ring, rx_buffer); in ixgbevf_fetch_rx_buffer()
893 dma_unmap_page(rx_ring->dev, rx_buffer->dma, in ixgbevf_fetch_rx_buffer()
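
Lines 844-893 tie the two halves together: the buffer at next_to_clean is looked up, an skb is allocated on demand (866-869, with a counter bump on failure), the device-written region is synced for the CPU (881), and after the payload is attached the page is either recycled or unmapped for good (888-893). The final hand-off, sketched with the stand-ins introduced above; page_reusable plays the role of ixgbevf_add_rx_frag()'s return value:

static unsigned int finish_rx_buffer(struct device *dev, struct rx_buf *rx_buffer,
                                     bool page_reusable, struct rx_buf *buf_ring,
                                     unsigned int next_to_alloc, unsigned int count)
{
        /* make the device-written bytes visible to the CPU before the skb
         * (or the stack) reads them, as at line 881
         */
        dma_sync_single_range_for_cpu(dev, rx_buffer->dma,
                                      rx_buffer->page_offset, PAGE_SIZE / 2,
                                      DMA_FROM_DEVICE);

        /* the payload is attached to the skb between these two steps */

        if (page_reusable)
                return reuse_rx_page(dev, buf_ring, next_to_alloc, count,
                                     rx_buffer);

        /* the stack keeps the page; drop our mapping entirely (line 893) */
        dma_unmap_page(dev, rx_buffer->dma, PAGE_SIZE, DMA_FROM_DEVICE);
        return next_to_alloc;
}
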
913 struct ixgbevf_ring *rx_ring, in ixgbevf_clean_rx_irq() argument
917 u16 cleaned_count = ixgbevf_desc_unused(rx_ring); in ixgbevf_clean_rx_irq()
918 struct sk_buff *skb = rx_ring->skb; in ixgbevf_clean_rx_irq()
925 ixgbevf_alloc_rx_buffers(rx_ring, cleaned_count); in ixgbevf_clean_rx_irq()
929 rx_desc = IXGBEVF_RX_DESC(rx_ring, rx_ring->next_to_clean); in ixgbevf_clean_rx_irq()
941 skb = ixgbevf_fetch_rx_buffer(rx_ring, rx_desc, skb); in ixgbevf_clean_rx_irq()
950 if (ixgbevf_is_non_eop(rx_ring, rx_desc)) in ixgbevf_clean_rx_irq()
954 if (ixgbevf_cleanup_headers(rx_ring, rx_desc, skb)) { in ixgbevf_clean_rx_irq()
967 ether_addr_equal(rx_ring->netdev->dev_addr, in ixgbevf_clean_rx_irq()
974 ixgbevf_process_skb_fields(rx_ring, rx_desc, skb); in ixgbevf_clean_rx_irq()
986 rx_ring->skb = skb; in ixgbevf_clean_rx_irq()
988 u64_stats_update_begin(&rx_ring->syncp); in ixgbevf_clean_rx_irq()
989 rx_ring->stats.packets += total_rx_packets; in ixgbevf_clean_rx_irq()
990 rx_ring->stats.bytes += total_rx_bytes; in ixgbevf_clean_rx_irq()
991 u64_stats_update_end(&rx_ring->syncp); in ixgbevf_clean_rx_irq()
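
The matches at 913-991 are the NAPI poll loop that drives everything above; the per-ring byte/packet totals are folded in under the u64_stats seqcount at 988-991 so 64-bit readers on 32-bit hosts see a consistent pair. Below is a control-flow skeleton of that loop using the helper names from this listing; the descriptor-done test, the own-MAC loopback filter at line 967 and error accounting are elided, IXGBEVF_RX_BUFFER_WRITE is the driver's batching threshold for refills, and napi_gro_receive() stands in for the driver's receive hand-off:

        while (likely(total_rx_packets < budget)) {
                union ixgbe_adv_rx_desc *rx_desc;

                /* refill in batches instead of one buffer per packet */
                if (cleaned_count >= IXGBEVF_RX_BUFFER_WRITE) {
                        ixgbevf_alloc_rx_buffers(rx_ring, cleaned_count);
                        cleaned_count = 0;
                }

                rx_desc = IXGBEVF_RX_DESC(rx_ring, rx_ring->next_to_clean);
                /* (stop here if the descriptor has not been written back) */

                skb = ixgbevf_fetch_rx_buffer(rx_ring, rx_desc, skb);
                if (!skb)
                        break;                  /* out of skbs, retry next poll */

                cleaned_count++;

                /* frame spans several buffers: keep gathering fragments */
                if (ixgbevf_is_non_eop(rx_ring, rx_desc))
                        continue;

                /* verify layout/length; may also drop the frame */
                if (ixgbevf_cleanup_headers(rx_ring, rx_desc, skb)) {
                        skb = NULL;
                        continue;
                }

                total_rx_bytes += skb->len;

                ixgbevf_process_skb_fields(rx_ring, rx_desc, skb);
                napi_gro_receive(&q_vector->napi, skb);

                skb = NULL;
                total_rx_packets++;
        }
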
1300 a->rx_ring[r_idx]->next = q_vector->rx.ring; in map_vector_to_rxq()
1301 q_vector->rx.ring = a->rx_ring[r_idx]; in map_vector_to_rxq()
1795 ixgbevf_configure_rx_ring(adapter, adapter->rx_ring[i]); in ixgbevf_configure_rx()
2096 static void ixgbevf_clean_rx_ring(struct ixgbevf_ring *rx_ring) in ixgbevf_clean_rx_ring() argument
2098 struct device *dev = rx_ring->dev; in ixgbevf_clean_rx_ring()
2103 if (rx_ring->skb) { in ixgbevf_clean_rx_ring()
2104 dev_kfree_skb(rx_ring->skb); in ixgbevf_clean_rx_ring()
2105 rx_ring->skb = NULL; in ixgbevf_clean_rx_ring()
2109 if (!rx_ring->rx_buffer_info) in ixgbevf_clean_rx_ring()
2113 for (i = 0; i < rx_ring->count; i++) { in ixgbevf_clean_rx_ring()
2116 rx_buffer = &rx_ring->rx_buffer_info[i]; in ixgbevf_clean_rx_ring()
2126 size = sizeof(struct ixgbevf_rx_buffer) * rx_ring->count; in ixgbevf_clean_rx_ring()
2127 memset(rx_ring->rx_buffer_info, 0, size); in ixgbevf_clean_rx_ring()
2130 memset(rx_ring->desc, 0, rx_ring->size); in ixgbevf_clean_rx_ring()
2167 ixgbevf_clean_rx_ring(adapter->rx_ring[i]); in ixgbevf_clean_all_rx_rings()
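
Lines 2096-2130 undo the buffer work at runtime (down/reset paths): any partially assembled skb is freed, every mapped page is unmapped and released, and both the buffer-info array and the descriptor memory are zeroed so the ring can be reused. A hedged sketch of the per-buffer part, reusing struct rx_buf from the sketch above:

static void drain_rx_buffers(struct device *dev, struct rx_buf *buf_ring,
                             unsigned int count)
{
        unsigned int i;

        for (i = 0; i < count; i++) {
                struct rx_buf *rx_buffer = &buf_ring[i];

                if (!rx_buffer->page)
                        continue;               /* slot was never filled */

                dma_unmap_page(dev, rx_buffer->dma, PAGE_SIZE,
                               DMA_FROM_DEVICE);
                __free_page(rx_buffer->page);
                rx_buffer->page = NULL;
        }

        /* mirrors the memset of rx_buffer_info at line 2127 */
        memset(buf_ring, 0, sizeof(*buf_ring) * count);
}
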
2194 ixgbevf_disable_rx_queue(adapter, adapter->rx_ring[i]); in ixgbevf_down()
2384 adapter->rx_ring[rx] = ring; in ixgbevf_alloc_queues()
2396 kfree(adapter->rx_ring[--rx]); in ixgbevf_alloc_queues()
2397 adapter->rx_ring[rx] = NULL; in ixgbevf_alloc_queues()
2594 kfree(adapter->rx_ring[i]); in ixgbevf_clear_interrupt_scheme()
2595 adapter->rx_ring[i] = NULL; in ixgbevf_clear_interrupt_scheme()
2727 adapter->rx_ring[i]->hw_csum_rx_error; in ixgbevf_update_stats()
2728 adapter->rx_ring[i]->hw_csum_rx_error = 0; in ixgbevf_update_stats()
3024 int ixgbevf_setup_rx_resources(struct ixgbevf_ring *rx_ring) in ixgbevf_setup_rx_resources() argument
3028 size = sizeof(struct ixgbevf_rx_buffer) * rx_ring->count; in ixgbevf_setup_rx_resources()
3029 rx_ring->rx_buffer_info = vzalloc(size); in ixgbevf_setup_rx_resources()
3030 if (!rx_ring->rx_buffer_info) in ixgbevf_setup_rx_resources()
3034 rx_ring->size = rx_ring->count * sizeof(union ixgbe_adv_rx_desc); in ixgbevf_setup_rx_resources()
3035 rx_ring->size = ALIGN(rx_ring->size, 4096); in ixgbevf_setup_rx_resources()
3037 rx_ring->desc = dma_alloc_coherent(rx_ring->dev, rx_ring->size, in ixgbevf_setup_rx_resources()
3038 &rx_ring->dma, GFP_KERNEL); in ixgbevf_setup_rx_resources()
3040 if (!rx_ring->desc) in ixgbevf_setup_rx_resources()
3045 vfree(rx_ring->rx_buffer_info); in ixgbevf_setup_rx_resources()
3046 rx_ring->rx_buffer_info = NULL; in ixgbevf_setup_rx_resources()
3047 dev_err(rx_ring->dev, "Unable to allocate memory for the Rx descriptor ring\n"); in ixgbevf_setup_rx_resources()
3066 err = ixgbevf_setup_rx_resources(adapter->rx_ring[i]); in ixgbevf_setup_all_rx_resources()
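
Lines 3024-3047 (driven per ring by the loop at 3066) show the two allocations behind each Rx ring: the software rx_buffer_info array from vzalloc(), and the hardware-visible descriptor ring from dma_alloc_coherent(), its size rounded up to a 4096-byte multiple. The same pattern as a hedged sketch, with the element types reduced to sizes for brevity:

static int setup_ring_memory(struct device *dev, unsigned int count,
                             size_t desc_elem_size, size_t buf_elem_size,
                             void **desc, dma_addr_t *desc_dma,
                             void **buffer_info)
{
        size_t size = buf_elem_size * count;

        *buffer_info = vzalloc(size);           /* software bookkeeping array */
        if (!*buffer_info)
                return -ENOMEM;

        /* round the descriptor ring up to whole 4 KiB pages (line 3035) */
        size = ALIGN(count * desc_elem_size, 4096);

        *desc = dma_alloc_coherent(dev, size, desc_dma, GFP_KERNEL);
        if (!*desc) {
                vfree(*buffer_info);
                *buffer_info = NULL;
                return -ENOMEM;
        }

        return 0;
}
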
3081 void ixgbevf_free_rx_resources(struct ixgbevf_ring *rx_ring) in ixgbevf_free_rx_resources() argument
3083 ixgbevf_clean_rx_ring(rx_ring); in ixgbevf_free_rx_resources()
3085 vfree(rx_ring->rx_buffer_info); in ixgbevf_free_rx_resources()
3086 rx_ring->rx_buffer_info = NULL; in ixgbevf_free_rx_resources()
3088 dma_free_coherent(rx_ring->dev, rx_ring->size, rx_ring->desc, in ixgbevf_free_rx_resources()
3089 rx_ring->dma); in ixgbevf_free_rx_resources()
3091 rx_ring->desc = NULL; in ixgbevf_free_rx_resources()
3105 if (adapter->rx_ring[i]->desc) in ixgbevf_free_all_rx_resources()
3106 ixgbevf_free_rx_resources(adapter->rx_ring[i]); in ixgbevf_free_all_rx_resources()
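
Finally, lines 3081-3106 release everything in the reverse order of setup: drain the ring, free the buffer-info array, then hand the coherent descriptor memory back. A hedged sketch using the stand-ins from the sketches above:

static void free_ring_memory(struct device *dev, struct rx_buf *buf_ring,
                             unsigned int count, void *desc,
                             size_t desc_bytes, dma_addr_t desc_dma)
{
        drain_rx_buffers(dev, buf_ring, count); /* unmap and free the pages */
        vfree(buf_ring);                        /* software bookkeeping array */
        dma_free_coherent(dev, desc_bytes, desc, desc_dma);
}
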
3855 ring = adapter->rx_ring[i]; in ixgbevf_get_stats()