Lines Matching refs:skb

32 lowpan_addr_info *lowpan_skb_priv(const struct sk_buff *skb)  in lowpan_skb_priv()  argument
34 WARN_ON_ONCE(skb_headroom(skb) < sizeof(struct lowpan_addr_info)); in lowpan_skb_priv()
35 return (struct lowpan_addr_info *)(skb->data - in lowpan_skb_priv()
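The references above appear to cover the kernel's 6LoWPAN-over-IEEE-802.15.4 transmit path (the lowpan_* functions under net/ieee802154/6lowpan). Source lines 32-35 in the listing show the helper that recovers per-packet addressing info parked in the skb headroom. Below is a hedged reconstruction of that helper; the struct lowpan_addr_info layout is a deliberate simplification for these sketches (bare EUI-64 addresses), not the kernel's definition.

#include <linux/skbuff.h>

/* Assumed, simplified layout: the real struct carries full 802.15.4
 * address descriptors; bare 64-bit EUI-64 addresses are enough for
 * the sketches in this listing. */
struct lowpan_addr_info {
	__le64 daddr;
	__le64 saddr;
};

/* Reconstructed from source lines 32-35 above: the info block is expected
 * to sit in the headroom immediately below skb->data, where
 * lowpan_header_create() parked it earlier. */
static inline struct
lowpan_addr_info *lowpan_skb_priv(const struct sk_buff *skb)
{
	/* Catch callers whose skb never had that headroom reserved. */
	WARN_ON_ONCE(skb_headroom(skb) < sizeof(struct lowpan_addr_info));
	return (struct lowpan_addr_info *)(skb->data -
			sizeof(struct lowpan_addr_info));
}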
39 int lowpan_header_create(struct sk_buff *skb, struct net_device *dev, in lowpan_header_create() argument
59 info = lowpan_skb_priv(skb); in lowpan_header_create()
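Source line 59 is the store side of the same trick: lowpan_header_create() runs as the virtual lowpan device's header_ops->create callback, so it fires while the IPv6 layer still owns the skb and can park the link-layer addresses for the real transmit path to read back later. A minimal sketch under the simplified struct above; the early return for non-IPv6 traffic and the 8-byte EUI-64 copies are assumptions of this sketch, not taken from the listing.

#include <linux/netdevice.h>
#include <linux/if_ether.h>	/* ETH_P_IPV6 */

/* Hedged sketch of the store side, continuing the sketch above. */
int lowpan_header_create(struct sk_buff *skb, struct net_device *dev,
			 unsigned short type, const void *daddr,
			 const void *saddr, unsigned int len)
{
	struct lowpan_addr_info *info;

	if (type != ETH_P_IPV6)		/* only IPv6 is carried over 6LoWPAN */
		return 0;

	if (!saddr)
		saddr = dev->dev_addr;	/* default to the device's own EUI-64 */

	info = lowpan_skb_priv(skb);
	memcpy(&info->daddr, daddr, sizeof(info->daddr));
	memcpy(&info->saddr, saddr, sizeof(info->saddr));

	return 0;
}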
73 lowpan_alloc_frag(struct sk_buff *skb, int size, in lowpan_alloc_frag() argument
76 struct net_device *real_dev = lowpan_dev_info(skb->dev)->real_dev; in lowpan_alloc_frag()
86 frag->priority = skb->priority; in lowpan_alloc_frag()
89 *mac_cb(frag) = *mac_cb(skb); in lowpan_alloc_frag()
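Source lines 73-89 allocate one fragment skb per outgoing 802.15.4 frame. What the listing shows directly: the fragment is built for the real device behind the virtual one (lowpan_dev_info(skb->dev)->real_dev), and the original skb's priority and mac_cb() control block are copied so every fragment goes out with the same MAC parameters. A hedged sketch of that allocation; the headroom/tailroom sizing, the dev_hard_header() call, and the error handling are assumptions of this sketch.

#include <linux/err.h>

/* Hedged sketch: allocate one fragment skb sized for the real 802.15.4
 * device and inherit the original packet's priority and MAC control block. */
static struct sk_buff *
lowpan_alloc_frag(struct sk_buff *skb, int size,
		  const struct ieee802154_hdr *wpan_hdr)
{
	struct net_device *real_dev = lowpan_dev_info(skb->dev)->real_dev;
	struct sk_buff *frag;
	int rc;

	frag = alloc_skb(real_dev->hard_header_len + size +
			 real_dev->needed_tailroom, GFP_ATOMIC);
	if (!frag)
		return ERR_PTR(-ENOMEM);

	frag->dev = real_dev;
	frag->priority = skb->priority;	/* keep the original packet's QoS */
	*mac_cb(frag) = *mac_cb(skb);	/* same MAC parameters on every fragment */

	/* Build the 802.15.4 MAC header from the addresses of the original
	 * frame; doing it via dev_hard_header() is an assumption here. */
	rc = dev_hard_header(frag, real_dev, 0, &wpan_hdr->dest,
			     &wpan_hdr->source, size);
	if (rc < 0) {
		kfree_skb(frag);
		return ERR_PTR(rc);
	}

	return frag;
}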
105 lowpan_xmit_fragment(struct sk_buff *skb, const struct ieee802154_hdr *wpan_hdr, in lowpan_xmit_fragment() argument
113 frag = lowpan_alloc_frag(skb, frag_hdrlen + len, wpan_hdr); in lowpan_xmit_fragment()
118 memcpy(skb_put(frag, len), skb_network_header(skb) + offset, len); in lowpan_xmit_fragment()
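Source lines 105-118 emit a single fragment: allocate it via lowpan_alloc_frag(), append the RFC 4944 fragment header, then copy len bytes of the already-compressed packet starting offset bytes past the network header. A hedged sketch of the whole function; the skb_put() copies mirror the calls visible in the listing, while the parameter names and the final hand-off to dev_queue_xmit() are assumptions.

/* Hedged sketch: emit one fragment carrying bytes [offset, offset + len)
 * of the compressed datagram, preceded by a FRAG1/FRAGN header. */
static int
lowpan_xmit_fragment(struct sk_buff *skb, const struct ieee802154_hdr *wpan_hdr,
		     u8 *frag_hdr, int frag_hdrlen, int offset, int len)
{
	struct sk_buff *frag;

	frag = lowpan_alloc_frag(skb, frag_hdrlen + len, wpan_hdr);
	if (IS_ERR(frag))
		return PTR_ERR(frag);

	/* RFC 4944 fragment header first, then the payload slice. */
	memcpy(skb_put(frag, frag_hdrlen), frag_hdr, frag_hdrlen);
	memcpy(skb_put(frag, len), skb_network_header(skb) + offset, len);

	return dev_queue_xmit(frag);
}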
126 lowpan_xmit_fragmented(struct sk_buff *skb, struct net_device *dev, in lowpan_xmit_fragmented() argument
135 dgram_size = lowpan_uncompress_size(skb, &dgram_offset) - in lowpan_xmit_fragmented()
136 skb->mac_len; in lowpan_xmit_fragmented()
147 skb_network_header_len(skb), 8); in lowpan_xmit_fragmented()
149 skb_offset = skb_network_header_len(skb); in lowpan_xmit_fragmented()
150 skb_unprocessed = skb->len - skb->mac_len - skb_offset; in lowpan_xmit_fragmented()
152 rc = lowpan_xmit_fragment(skb, wpan_hdr, frag_hdr, in lowpan_xmit_fragmented()
154 frag_len + skb_network_header_len(skb)); in lowpan_xmit_fragmented()
173 rc = lowpan_xmit_fragment(skb, wpan_hdr, frag_hdr, in lowpan_xmit_fragmented()
183 consume_skb(skb); in lowpan_xmit_fragmented()
187 kfree_skb(skb); in lowpan_xmit_fragmented()
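Source lines 126-187 are the fragmentation driver: compute the size of the uncompressed datagram (lowpan_uncompress_size() minus the MAC header, lines 135-136), send a FRAG1 frame that carries the compressed header plus an initial payload chunk rounded down to a multiple of 8 (lines 147-154), then loop over FRAGN frames until nothing is unprocessed, finishing with consume_skb() on success or kfree_skb() on error. The rounding matters because FRAGN offsets are expressed in 8-octet units of the uncompressed datagram. The standalone example below works that arithmetic through; lowpan_frag_plan and all its parameters are hypothetical names for illustration only, and the frame-size numbers in main() are just plausible values, not taken from the listing.

#include <stdio.h>

/* Hypothetical helper mirroring the size arithmetic of
 * lowpan_xmit_fragmented():
 *   dgram_size   - size of the uncompressed IPv6 datagram to reassemble
 *   dgram_offset - uncompressed header bytes represented by the
 *                  compressed header carried in FRAG1
 *   comp_hdr_len - length of the compressed header actually sent,
 *                  i.e. skb_network_header_len(skb)
 *   payload_cap  - MAC payload available per 802.15.4 frame
 */
static void lowpan_frag_plan(int dgram_size, int dgram_offset,
			     int comp_hdr_len, int payload_cap)
{
	const int frag1_head = 4, fragn_head = 5;	/* RFC 4944 header sizes */
	int frag_len, frag_cap, unprocessed;

	/* FRAG1 payload: what fits after the fragment header and the
	 * compressed header, rounded down to a multiple of 8. */
	frag_len = ((payload_cap - frag1_head - comp_hdr_len) / 8) * 8;
	printf("FRAG1: %d hdr + %d compressed hdr + %d payload\n",
	       frag1_head, comp_hdr_len, frag_len);

	unprocessed = dgram_size - dgram_offset - frag_len;

	/* Subsequent fragments carry raw payload only. */
	frag_cap = ((payload_cap - fragn_head) / 8) * 8;
	while (unprocessed > 0) {
		dgram_offset += frag_len;	/* offset in uncompressed bytes */
		frag_len = unprocessed < frag_cap ? unprocessed : frag_cap;
		unprocessed -= frag_len;
		printf("FRAGN: offset %d (= %d * 8), %d payload\n",
		       dgram_offset, dgram_offset / 8, frag_len);
	}
}

int main(void)
{
	/* Example: 1280-byte IPv6 datagram, 40-byte header compressed to
	 * 7 bytes, roughly 100 bytes of MAC payload per frame. */
	lowpan_frag_plan(1280, 40, 7, 100);
	return 0;
}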
191 static int lowpan_header(struct sk_buff *skb, struct net_device *dev) in lowpan_header() argument
194 struct ieee802154_mac_cb *cb = mac_cb_init(skb); in lowpan_header()
198 memcpy(&info, lowpan_skb_priv(skb), sizeof(info)); in lowpan_header()
204 lowpan_header_compress(skb, dev, ETH_P_IPV6, daddr, saddr, skb->len); in lowpan_header()
229 return dev_hard_header(skb, lowpan_dev_info(dev)->real_dev, in lowpan_header()
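Source lines 191-229 run on the transmit path proper: copy the stashed addresses back out of the headroom (line 198), initialise the 802.15.4 MAC control block, compress the IPv6 header in place with lowpan_header_compress() (line 204), and finally ask the real device to prepend its MAC header (line 229). A hedged sketch of that flow under the simplified address struct used above; the MAC control-block fields set here are assumptions, and the real code converts the raw addresses into full 802.15.4 address structures before calling dev_hard_header(), which this sketch omits.

/* Hedged sketch of the per-packet header work done just before queueing. */
static int lowpan_header(struct sk_buff *skb, struct net_device *dev)
{
	struct ieee802154_mac_cb *cb = mac_cb_init(skb);
	struct lowpan_addr_info info;

	/* Copy the stash out first: compression below rewrites skb->data
	 * and can clobber the headroom area the stash lives in. */
	memcpy(&info, lowpan_skb_priv(skb), sizeof(info));

	/* Replace the 40-byte IPv6 header with its IPHC-compressed form. */
	lowpan_header_compress(skb, dev, ETH_P_IPV6, &info.daddr,
			       &info.saddr, skb->len);

	/* Assumed MAC parameters: plain data frame with ack requested. */
	cb->type = IEEE802154_FC_TYPE_DATA;
	cb->ackreq = true;

	/* Prepend the real 802.15.4 MAC header via the real device
	 * (address format conversion omitted in this sketch). */
	return dev_hard_header(skb, lowpan_dev_info(dev)->real_dev,
			       ETH_P_IPV6, &info.daddr, &info.saddr, 0);
}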
233 netdev_tx_t lowpan_xmit(struct sk_buff *skb, struct net_device *dev) in lowpan_xmit() argument
243 skb = skb_unshare(skb, GFP_ATOMIC); in lowpan_xmit()
244 if (!skb) in lowpan_xmit()
247 ret = lowpan_header(skb, dev); in lowpan_xmit()
249 kfree_skb(skb); in lowpan_xmit()
253 if (ieee802154_hdr_peek(skb, &wpan_hdr) < 0) { in lowpan_xmit()
254 kfree_skb(skb); in lowpan_xmit()
260 if (skb_tail_pointer(skb) - skb_network_header(skb) <= max_single) { in lowpan_xmit()
261 skb->dev = lowpan_dev_info(dev)->real_dev; in lowpan_xmit()
262 return dev_queue_xmit(skb); in lowpan_xmit()
267 rc = lowpan_xmit_fragmented(skb, dev, &wpan_hdr); in lowpan_xmit()
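Source lines 233-267 are the virtual device's ndo_start_xmit. The sequence visible in the listing: make the skb safe to modify (skb_unshare, line 243), compress and stage the headers via lowpan_header() (247), peek the freshly built 802.15.4 header (253), then either hand the frame straight to the real device when it fits in one frame (260-262) or fall through to the fragmentation path (267). A hedged sketch of that decision; the ieee802154_max_payload() call that derives max_single and the return-code handling are assumptions of this sketch.

/* Hedged sketch of the virtual lowpan device's ndo_start_xmit. */
netdev_tx_t lowpan_xmit(struct sk_buff *skb, struct net_device *dev)
{
	struct ieee802154_hdr wpan_hdr;
	int max_single, ret;

	/* The packet is rewritten in place (IPHC, fragmentation), so make
	 * sure nobody else shares this data. */
	skb = skb_unshare(skb, GFP_ATOMIC);
	if (!skb)
		return NET_XMIT_DROP;

	ret = lowpan_header(skb, dev);
	if (ret < 0) {
		kfree_skb(skb);
		return NET_XMIT_DROP;
	}

	if (ieee802154_hdr_peek(skb, &wpan_hdr) < 0) {
		kfree_skb(skb);
		return NET_XMIT_DROP;
	}

	/* How much payload one frame can carry under this MAC header. */
	max_single = ieee802154_max_payload(&wpan_hdr);

	if (skb_tail_pointer(skb) - skb_network_header(skb) <= max_single) {
		/* Fits in a single frame: transmit on the real device. */
		skb->dev = lowpan_dev_info(dev)->real_dev;
		return dev_queue_xmit(skb);
	}

	/* Too big: split into FRAG1 + FRAGN frames. */
	ret = lowpan_xmit_fragmented(skb, dev, &wpan_hdr);
	return ret < 0 ? NET_XMIT_DROP : ret;
}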