Lines Matching refs:skb_shinfo (net/core/skbuff.c)
258 shinfo = skb_shinfo(skb); in __alloc_skb()
326 shinfo = skb_shinfo(skb); in __build_skb()
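
Both allocation sites initialize the shared info area the same way: skb_shinfo() resolves to the struct skb_shared_info stored at skb_end_pointer(skb), immediately after the packet data, so it is allocated together with the head. A minimal sketch of what both call sites do next (the real functions also annotate the area for kmemcheck):

        struct skb_shared_info *shinfo = skb_shinfo(skb);

        /* Zero every field up to, but not including, dataref,
         * then publish the single initial reference. */
        memset(shinfo, 0, offsetof(struct skb_shared_info, dataref));
        atomic_set(&shinfo->dataref, 1);
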
586 skb_frag_t *frag = &skb_shinfo(skb)->frags[i]; in skb_coalesce_rx_frag()
603 skb_drop_list(&skb_shinfo(skb)->frag_list); in skb_drop_fraglist()
624 struct skb_shared_info *shinfo = skb_shinfo(skb); in skb_release_data()
768 if (skb_shinfo(skb)->tx_flags & SKBTX_DEV_ZEROCOPY) { in skb_tx_error()
771 uarg = skb_shinfo(skb)->destructor_arg; in skb_tx_error()
774 skb_shinfo(skb)->tx_flags &= ~SKBTX_DEV_ZEROCOPY; in skb_tx_error()
887 atomic_inc(&(skb_shinfo(skb)->dataref)); in __skb_clone()
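
The __skb_clone() hit shows why dataref exists: a clone shares the original's data buffer and its skb_shared_info, so cloning just takes one more reference, and skb_release_data() (line 624 above) frees the frags, the frag_list and the head only once the last reference drops. A hypothetical helper capturing the release-side check:

        /* Hypothetical sketch of the dataref test in skb_release_data();
         * the real code subtracts (1 << SKB_DATAREF_SHIFT) + 1 instead
         * of 1 when skb->nohdr is set. */
        static bool skb_data_still_shared(struct sk_buff *skb)
        {
                return atomic_sub_return(1, &skb_shinfo(skb)->dataref) != 0;
        }
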
929 int num_frags = skb_shinfo(skb)->nr_frags; in skb_copy_ubufs()
931 struct ubuf_info *uarg = skb_shinfo(skb)->destructor_arg; in skb_copy_ubufs()
935 skb_frag_t *f = &skb_shinfo(skb)->frags[i]; in skb_copy_ubufs()
963 skb_shinfo(skb)->frags[i].size); in skb_copy_ubufs()
967 skb_shinfo(skb)->tx_flags &= ~SKBTX_DEV_ZEROCOPY; in skb_copy_ubufs()
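
skb_tx_error() and skb_copy_ubufs() wind down the zerocopy state the same way: run the ubuf_info completion callback stashed in destructor_arg, then clear SKBTX_DEV_ZEROCOPY so the flag is not acted on twice. Both sites pass zerocopy_success = false, since in neither case did the payload go out as zerocopy (skb_release_data(), by contrast, completes with true). A sketch of the shared pattern, under a hypothetical helper name:

        /* Hypothetical: complete and detach the zerocopy state. */
        static void skb_zcopy_complete(struct sk_buff *skb, bool success)
        {
                struct ubuf_info *uarg = skb_shinfo(skb)->destructor_arg;

                if (skb_shinfo(skb)->tx_flags & SKBTX_DEV_ZEROCOPY) {
                        if (uarg->callback)
                                uarg->callback(uarg, success);
                        skb_shinfo(skb)->tx_flags &= ~SKBTX_DEV_ZEROCOPY;
                }
        }
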
1035 skb_shinfo(new)->gso_size = skb_shinfo(old)->gso_size; in copy_skb_header()
1036 skb_shinfo(new)->gso_segs = skb_shinfo(old)->gso_segs; in copy_skb_header()
1037 skb_shinfo(new)->gso_type = skb_shinfo(old)->gso_type; in copy_skb_header()
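
copy_skb_header() has to carry the GSO parameters across by hand because they live in the shared info, not in struct sk_buff itself; a copy that lost gso_size would no longer be segmented on transmit. A hypothetical invariant check for the three fields copied above:

        /* Hypothetical: after copy_skb_header(new, old), both skbs
         * must describe the same segmentation. */
        static bool skb_gso_state_matches(const struct sk_buff *a,
                                          const struct sk_buff *b)
        {
                return skb_shinfo(a)->gso_size == skb_shinfo(b)->gso_size &&
                       skb_shinfo(a)->gso_segs == skb_shinfo(b)->gso_segs &&
                       skb_shinfo(a)->gso_type == skb_shinfo(b)->gso_type;
        }
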
1125 if (skb_shinfo(skb)->nr_frags) { in __pskb_copy_fclone()
1133 for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) { in __pskb_copy_fclone()
1134 skb_shinfo(n)->frags[i] = skb_shinfo(skb)->frags[i]; in __pskb_copy_fclone()
1137 skb_shinfo(n)->nr_frags = i; in __pskb_copy_fclone()
1141 skb_shinfo(n)->frag_list = skb_shinfo(skb)->frag_list; in __pskb_copy_fclone()
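
Copying a frag descriptor copies only a pointer to the page, so __pskb_copy_fclone() must also take a page reference per slot; in the source, the assignment at line 1134 is immediately followed by skb_frag_ref(skb, i), and the frag_list copy at line 1141 is likewise followed by skb_clone_fraglist(n) to bump each chained skb. A sketch of the frag loop under that assumption:

        /* Duplicate each descriptor and pin its page for the copy. */
        for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) {
                skb_shinfo(n)->frags[i] = skb_shinfo(skb)->frags[i];
                skb_frag_ref(skb, i);
        }
        skb_shinfo(n)->nr_frags = i;
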
1196 skb_shinfo(skb), in pskb_expand_head()
1197 offsetof(struct skb_shared_info, frags[skb_shinfo(skb)->nr_frags])); in pskb_expand_head()
1208 for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) in pskb_expand_head()
1234 atomic_set(&skb_shinfo(skb)->dataref, 1); in pskb_expand_head()
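
pskb_expand_head() copies only the live part of the shared info into the new buffer, i.e. the fixed fields plus nr_frags frag descriptors (the offsetof() expression at line 1197), takes a page reference per frag for the new head, and resets dataref to 1 because the new head is private. A sketch of the copy, where data and size name the freshly allocated buffer and its head size as in the function itself:

        /* Copy only the used portion of the old shared info. */
        memcpy((struct skb_shared_info *)(data + size), skb_shinfo(skb),
               offsetof(struct skb_shared_info,
                        frags[skb_shinfo(skb)->nr_frags]));
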
1473 int nfrags = skb_shinfo(skb)->nr_frags; in ___pskb_trim()
1486 int end = offset + skb_frag_size(&skb_shinfo(skb)->frags[i]); in ___pskb_trim()
1493 skb_frag_size_set(&skb_shinfo(skb)->frags[i++], len - offset); in ___pskb_trim()
1496 skb_shinfo(skb)->nr_frags = i; in ___pskb_trim()
1506 for (fragp = &skb_shinfo(skb)->frag_list; (frag = *fragp); in ___pskb_trim()
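
The ___pskb_trim() hits are the frag half of trimming an skb down to len (the frag_list tail is walked separately at line 1506): step through the frag array past the linear bytes, shrink the frag that crosses the new end, and discard everything after it. A sketch assuming offset starts at skb_headlen(skb) and nfrags was sampled as at line 1473:

        for (i = 0; i < nfrags; i++) {
                int end = offset + skb_frag_size(&skb_shinfo(skb)->frags[i]);

                if (end < len) {
                        offset = end;
                        continue;
                }
                /* frag i crosses the new end: keep len - offset bytes */
                skb_frag_size_set(&skb_shinfo(skb)->frags[i++], len - offset);
                break;
        }
        skb_shinfo(skb)->nr_frags = i;
        for (; i < nfrags; i++)
                skb_frag_unref(skb, i);         /* release dropped pages */
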
1601 for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) { in __pskb_pull_tail()
1602 int size = skb_frag_size(&skb_shinfo(skb)->frags[i]); in __pskb_pull_tail()
1617 struct sk_buff *list = skb_shinfo(skb)->frag_list; in __pskb_pull_tail()
1653 while ((list = skb_shinfo(skb)->frag_list) != insp) { in __pskb_pull_tail()
1654 skb_shinfo(skb)->frag_list = list->next; in __pskb_pull_tail()
1660 skb_shinfo(skb)->frag_list = clone; in __pskb_pull_tail()
1668 for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) { in __pskb_pull_tail()
1669 int size = skb_frag_size(&skb_shinfo(skb)->frags[i]); in __pskb_pull_tail()
1675 skb_shinfo(skb)->frags[k] = skb_shinfo(skb)->frags[i]; in __pskb_pull_tail()
1677 skb_shinfo(skb)->frags[k].page_offset += eat; in __pskb_pull_tail()
1678 skb_frag_size_sub(&skb_shinfo(skb)->frags[k], eat); in __pskb_pull_tail()
1684 skb_shinfo(skb)->nr_frags = k; in __pskb_pull_tail()
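
The loop at lines 1668-1684 is the heart of __pskb_pull_tail(): after eat bytes of frag data have been copied into the linear area, the frag array is compacted in place. Fully consumed frags drop their page reference; a partially consumed frag is advanced by bumping page_offset and shrinking its size. The pattern, essentially verbatim:

        for (i = 0, k = 0; i < skb_shinfo(skb)->nr_frags; i++) {
                int size = skb_frag_size(&skb_shinfo(skb)->frags[i]);

                if (size <= eat) {
                        skb_frag_unref(skb, i);         /* fully eaten */
                        eat -= size;
                } else {
                        skb_shinfo(skb)->frags[k] = skb_shinfo(skb)->frags[i];
                        if (eat) {
                                skb_shinfo(skb)->frags[k].page_offset += eat;
                                skb_frag_size_sub(&skb_shinfo(skb)->frags[k], eat);
                                eat = 0;
                        }
                        k++;
                }
        }
        skb_shinfo(skb)->nr_frags = k;
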
1728 for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) { in skb_copy_bits()
1730 skb_frag_t *f = &skb_shinfo(skb)->frags[i]; in skb_copy_bits()
1911 for (seg = 0; seg < skb_shinfo(skb)->nr_frags; seg++) { in __skb_splice_bits()
1912 const skb_frag_t *f = &skb_shinfo(skb)->frags[seg]; in __skb_splice_bits()
2016 for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) { in skb_store_bits()
2017 skb_frag_t *frag = &skb_shinfo(skb)->frags[i]; in skb_store_bits()
2089 for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) { in __skb_checksum()
2091 skb_frag_t *frag = &skb_shinfo(skb)->frags[i]; in __skb_checksum()
2176 for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) { in skb_copy_and_csum_bits()
2181 end = start + skb_frag_size(&skb_shinfo(skb)->frags[i]); in skb_copy_and_csum_bits()
2185 skb_frag_t *frag = &skb_shinfo(skb)->frags[i]; in skb_copy_and_csum_bits()
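
skb_copy_bits(), __skb_splice_bits(), skb_store_bits(), __skb_checksum() and skb_copy_and_csum_bits() all share one iteration shape: consume the linear bytes first, then the page frags, then recurse over the frag_list. A hypothetical reduction helper showing that shape (its result must equal skb->len):

        /* Hypothetical: total payload reachable through the three
         * storage areas of an skb. */
        static unsigned int skb_count_payload(const struct sk_buff *skb)
        {
                const struct sk_buff *frag_iter;
                unsigned int i, n = skb_headlen(skb);           /* linear */

                for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) /* frags */
                        n += skb_frag_size(&skb_shinfo(skb)->frags[i]);

                skb_walk_frags(skb, frag_iter)                  /* frag_list */
                        n += frag_iter->len;

                return n;
        }
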
2246 skb_shinfo(from)->nr_frags >= MAX_SKB_FRAGS) in skb_zerocopy_headlen()
2315 for (i = 0; i < skb_shinfo(from)->nr_frags; i++) { in skb_zerocopy()
2318 skb_shinfo(to)->frags[j] = skb_shinfo(from)->frags[i]; in skb_zerocopy()
2319 skb_shinfo(to)->frags[j].size = min_t(int, skb_shinfo(to)->frags[j].size, len); in skb_zerocopy()
2320 len -= skb_shinfo(to)->frags[j].size; in skb_zerocopy()
2324 skb_shinfo(to)->nr_frags = j; in skb_zerocopy()
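
skb_zerocopy() moves payload without copying bytes: skb_zerocopy_headlen() (line 2246) first decides how much must be copied linearly, then each remaining source descriptor is duplicated into 'to' and clamped to the remaining length. In the full function the loop body above ends by pinning the page, elided from the matches; a one-line sketch of that missing step:

        skb_frag_ref(to, j);    /* 'to' now also holds the page */
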
2527 for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) in skb_split_inside_header()
2528 skb_shinfo(skb1)->frags[i] = skb_shinfo(skb)->frags[i]; in skb_split_inside_header()
2530 skb_shinfo(skb1)->nr_frags = skb_shinfo(skb)->nr_frags; in skb_split_inside_header()
2531 skb_shinfo(skb)->nr_frags = 0; in skb_split_inside_header()
2544 const int nfrags = skb_shinfo(skb)->nr_frags; in skb_split_no_header()
2546 skb_shinfo(skb)->nr_frags = 0; in skb_split_no_header()
2552 int size = skb_frag_size(&skb_shinfo(skb)->frags[i]); in skb_split_no_header()
2555 skb_shinfo(skb1)->frags[k] = skb_shinfo(skb)->frags[i]; in skb_split_no_header()
2567 skb_shinfo(skb1)->frags[0].page_offset += len - pos; in skb_split_no_header()
2568 skb_frag_size_sub(&skb_shinfo(skb1)->frags[0], len - pos); in skb_split_no_header()
2569 skb_frag_size_set(&skb_shinfo(skb)->frags[i], len - pos); in skb_split_no_header()
2570 skb_shinfo(skb)->nr_frags++; in skb_split_no_header()
2574 skb_shinfo(skb)->nr_frags++; in skb_split_no_header()
2577 skb_shinfo(skb1)->nr_frags = k; in skb_split_no_header()
2590 skb_shinfo(skb1)->tx_flags = skb_shinfo(skb)->tx_flags & SKBTX_SHARED_FRAG; in skb_split()
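
In skb_split_no_header(), a frag that straddles the split point is the interesting case (lines 2567-2573): the descriptor is duplicated into skb1, skb1's copy is advanced past the first len - pos bytes, the original is truncated to them, and both skbs end up referencing the same page, which is why skb_split() propagates SKBTX_SHARED_FRAG at line 2590. A sketch of the straddling case, which by construction lands in skb1's slot 0:

        /* frag i covers the split point: give each skb a piece. */
        skb_shinfo(skb1)->frags[0] = skb_shinfo(skb)->frags[i];
        skb_frag_ref(skb, i);           /* page now shared by both */
        skb_shinfo(skb1)->frags[0].page_offset += len - pos;
        skb_frag_size_sub(&skb_shinfo(skb1)->frags[0], len - pos);
        skb_frag_size_set(&skb_shinfo(skb)->frags[i], len - pos);
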
2635 to = skb_shinfo(tgt)->nr_frags; in skb_shift()
2636 fragfrom = &skb_shinfo(skb)->frags[from]; in skb_shift()
2655 fragfrom = &skb_shinfo(skb)->frags[from]; in skb_shift()
2656 fragto = &skb_shinfo(tgt)->frags[merge]; in skb_shift()
2670 (skb_shinfo(skb)->nr_frags - from) > (MAX_SKB_FRAGS - to)) in skb_shift()
2676 while ((todo > 0) && (from < skb_shinfo(skb)->nr_frags)) { in skb_shift()
2680 fragfrom = &skb_shinfo(skb)->frags[from]; in skb_shift()
2681 fragto = &skb_shinfo(tgt)->frags[to]; in skb_shift()
2705 skb_shinfo(tgt)->nr_frags = to; in skb_shift()
2708 fragfrom = &skb_shinfo(skb)->frags[0]; in skb_shift()
2709 fragto = &skb_shinfo(tgt)->frags[merge]; in skb_shift()
2717 while (from < skb_shinfo(skb)->nr_frags) in skb_shift()
2718 skb_shinfo(skb)->frags[to++] = skb_shinfo(skb)->frags[from++]; in skb_shift()
2719 skb_shinfo(skb)->nr_frags = to; in skb_shift()
2721 BUG_ON(todo > 0 && !skb_shinfo(skb)->nr_frags); in skb_shift()
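
skb_shift() moves up to shiftlen bytes of frag data from skb into tgt without touching the pages. Its fast path merges into tgt's last frag when the shifted data is physically contiguous with it; skb_can_coalesce() expresses exactly that test. A sketch of the merge, assuming fragfrom/fragto point as at lines 2655-2656:

        /* If fragfrom continues tgt's last frag in the same page,
         * shift bytes by adjusting the two descriptors only. */
        if (skb_can_coalesce(tgt, to, skb_frag_page(fragfrom),
                             fragfrom->page_offset)) {
                skb_frag_size_add(fragto, shiftlen);
                skb_frag_size_sub(fragfrom, shiftlen);
                fragfrom->page_offset += shiftlen;
        }
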
2812 while (st->frag_idx < skb_shinfo(st->cur_skb)->nr_frags) { in skb_seq_read()
2813 frag = &skb_shinfo(st->cur_skb)->frags[st->frag_idx]; in skb_seq_read()
2841 st->cur_skb = skb_shinfo(st->root_skb)->frag_list; in skb_seq_read()
2926 int frg_cnt = skb_shinfo(skb)->nr_frags; in skb_append_datato_frags()
3004 struct sk_buff *list_skb = skb_shinfo(head_skb)->frag_list; in skb_segment()
3005 skb_frag_t *frag = skb_shinfo(head_skb)->frags; in skb_segment()
3006 unsigned int mss = skb_shinfo(head_skb)->gso_size; in skb_segment()
3016 int nfrags = skb_shinfo(head_skb)->nr_frags; in skb_segment()
3054 nfrags = skb_shinfo(list_skb)->nr_frags; in skb_segment()
3055 frag = skb_shinfo(list_skb)->frags; in skb_segment()
3131 nskb_frag = skb_shinfo(nskb)->frags; in skb_segment()
3136 skb_shinfo(nskb)->tx_flags = skb_shinfo(head_skb)->tx_flags & in skb_segment()
3144 nfrags = skb_shinfo(list_skb)->nr_frags; in skb_segment()
3145 frag = skb_shinfo(list_skb)->frags; in skb_segment()
3153 if (unlikely(skb_shinfo(nskb)->nr_frags >= in skb_segment()
3173 skb_shinfo(nskb)->nr_frags++; in skb_segment()
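
skb_segment() builds each output segment by handing it descriptors for the head skb's pages rather than copying payload: copy the skb_frag_t, take a page reference, then clip it to this segment's [offset, offset + len) window. A sketch of the per-frag step around line 3173:

        *nskb_frag = *frag;
        __skb_frag_ref(nskb_frag);      /* segment pins the page */
        size = skb_frag_size(nskb_frag);

        if (pos < offset) {
                /* clip bytes that belong to an earlier segment */
                nskb_frag->page_offset += offset - pos;
                skb_frag_size_sub(nskb_frag, offset - pos);
        }
        skb_shinfo(nskb)->nr_frags++;
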
3227 struct skb_shared_info *pinfo, *skbinfo = skb_shinfo(skb); in skb_gro_receive()
3238 pinfo = skb_shinfo(lp); in skb_gro_receive()
3315 skb_shinfo(p)->frag_list = skb; in skb_gro_receive()
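
skb_gro_receive() prefers to grow the aggregate packet p in place: when the incoming skb's payload sits entirely in page frags, the descriptors are appended to p's shared info and ownership of the pages moves with them; only when that is not possible does it fall back to chaining the skb on p's frag_list (line 3315). A rough sketch of the donate-the-frags path (the real code also clips already-consumed header bytes off the first donated frag):

        /* Append the donor's descriptors and transfer ownership. */
        memcpy(pinfo->frags + pinfo->nr_frags, skbinfo->frags,
               skbinfo->nr_frags * sizeof(skb_frag_t));
        pinfo->nr_frags += skbinfo->nr_frags;
        skbinfo->nr_frags = 0;
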
3378 for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) { in __skb_to_sgvec()
3383 end = start + skb_frag_size(&skb_shinfo(skb)->frags[i]); in __skb_to_sgvec()
3385 skb_frag_t *frag = &skb_shinfo(skb)->frags[i]; in __skb_to_sgvec()
3483 if ((skb_cloned(skb) || skb_shinfo(skb)->nr_frags) && in skb_cow_data()
3506 skb_p = &skb_shinfo(skb)->frag_list; in skb_cow_data()
3522 if (skb_shinfo(skb1)->nr_frags || in skb_cow_data()
3531 skb_shinfo(skb1)->nr_frags || in skb_cow_data()
3665 serr->ee.ee_data = skb_shinfo(skb)->tskey; in __skb_complete_tx_timestamp()
3731 skb_shinfo(skb)->tx_flags = skb_shinfo(orig_skb)->tx_flags; in __skb_tstamp_tx()
3732 skb_shinfo(skb)->tskey = skb_shinfo(orig_skb)->tskey; in __skb_tstamp_tx()
4086 if (skb_shinfo(to)->nr_frags + in skb_try_coalesce()
4087 skb_shinfo(from)->nr_frags >= MAX_SKB_FRAGS) in skb_try_coalesce()
4098 skb_fill_page_desc(to, skb_shinfo(to)->nr_frags, in skb_try_coalesce()
4102 if (skb_shinfo(to)->nr_frags + in skb_try_coalesce()
4103 skb_shinfo(from)->nr_frags > MAX_SKB_FRAGS) in skb_try_coalesce()
4111 memcpy(skb_shinfo(to)->frags + skb_shinfo(to)->nr_frags, in skb_try_coalesce()
4112 skb_shinfo(from)->frags, in skb_try_coalesce()
4113 skb_shinfo(from)->nr_frags * sizeof(skb_frag_t)); in skb_try_coalesce()
4114 skb_shinfo(to)->nr_frags += skb_shinfo(from)->nr_frags; in skb_try_coalesce()
4117 skb_shinfo(from)->nr_frags = 0; in skb_try_coalesce()
4122 for (i = 0; i < skb_shinfo(from)->nr_frags; i++) in skb_try_coalesce()
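
skb_try_coalesce() has two merge shapes, both visible above: if 'from' has a linear head it is appended to 'to' as one more page frag (line 4098, only when that head is page-backed), otherwise from's whole frag array is bulk-copied. In both cases the combined count must respect MAX_SKB_FRAGS, and the page references must end up held by exactly the skbs that use them. A sketch of the headless path:

        if (skb_shinfo(to)->nr_frags +
            skb_shinfo(from)->nr_frags > MAX_SKB_FRAGS)
                return false;

        memcpy(skb_shinfo(to)->frags + skb_shinfo(to)->nr_frags,
               skb_shinfo(from)->frags,
               skb_shinfo(from)->nr_frags * sizeof(skb_frag_t));
        skb_shinfo(to)->nr_frags += skb_shinfo(from)->nr_frags;

        if (!skb_cloned(from))
                skb_shinfo(from)->nr_frags = 0; /* ownership moved */
        else
                for (i = 0; i < skb_shinfo(from)->nr_frags; i++)
                        skb_frag_ref(from, i);  /* clone keeps its refs */
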
4179 const struct skb_shared_info *shinfo = skb_shinfo(skb); in skb_gso_transport_seglen()