Searched refs:sg (Results 1 – 200 of 745) sorted by relevance

/linux-4.4.14/tools/virtio/linux/
scatterlist.h 13 #define sg_is_chain(sg) ((sg)->page_link & 0x01) argument
14 #define sg_is_last(sg) ((sg)->page_link & 0x02) argument
15 #define sg_chain_ptr(sg) \ argument
16 ((struct scatterlist *) ((sg)->page_link & ~0x03))
28 static inline void sg_assign_page(struct scatterlist *sg, struct page *page) in sg_assign_page() argument
30 unsigned long page_link = sg->page_link & 0x3; in sg_assign_page()
38 BUG_ON(sg->sg_magic != SG_MAGIC); in sg_assign_page()
39 BUG_ON(sg_is_chain(sg)); in sg_assign_page()
41 sg->page_link = page_link | (unsigned long) page; in sg_assign_page()
58 static inline void sg_set_page(struct scatterlist *sg, struct page *page, in sg_set_page() argument
[all …]
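
A note on the excerpt above: the low two bits of page_link double as markers, which is why the stored page pointer must be at least 4-byte aligned. A minimal sketch (helper name hypothetical) of decoding one entry the way sg_is_chain()/sg_is_last()/sg_chain_ptr() do:

    /* Hypothetical helper; kernel context, <linux/scatterlist.h> assumed.
     * Bit 0 marks a chain entry (the "page" is really a pointer to the
     * next scatterlist array); bit 1 marks the final entry of the list.
     */
    static struct page *sg_unpack(struct scatterlist *sg,
                                  bool *is_chain, bool *is_last)
    {
        *is_chain = sg->page_link & 0x01;
        *is_last = sg->page_link & 0x02;
        return (struct page *)(sg->page_link & ~0x03);
    }
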
/linux-4.4.14/include/linux/
scatterlist.h 30 #define sg_dma_address(sg) ((sg)->dma_address) argument
33 #define sg_dma_len(sg) ((sg)->dma_length) argument
35 #define sg_dma_len(sg) ((sg)->length) argument
67 #define sg_is_chain(sg) ((sg)->page_link & 0x01) argument
68 #define sg_is_last(sg) ((sg)->page_link & 0x02) argument
69 #define sg_chain_ptr(sg) \ argument
70 ((struct scatterlist *) ((sg)->page_link & ~0x03))
82 static inline void sg_assign_page(struct scatterlist *sg, struct page *page) in sg_assign_page() argument
84 unsigned long page_link = sg->page_link & 0x3; in sg_assign_page()
92 BUG_ON(sg->sg_magic != SG_MAGIC); in sg_assign_page()
[all …]
dma-debug.h 47 extern void debug_dma_map_sg(struct device *dev, struct scatterlist *sg,
79 struct scatterlist *sg,
83 struct scatterlist *sg,
123 static inline void debug_dma_map_sg(struct device *dev, struct scatterlist *sg, in debug_dma_map_sg() argument
173 struct scatterlist *sg, in debug_dma_sync_sg_for_cpu() argument
179 struct scatterlist *sg, in debug_dma_sync_sg_for_device() argument
swiotlb.h 74 swiotlb_map_sg(struct device *hwdev, struct scatterlist *sg, int nents,
78 swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sg, int nents,
95 swiotlb_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg,
103 swiotlb_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg,
/linux-4.4.14/Documentation/scsi/
scsi-generic.txt 1 Notes on Linux SCSI Generic (sg) driver
6 The SCSI Generic driver (sg) is one of the four "high level" SCSI device
10 Thus sg is used for scanners, CD writers and reading audio CDs digitally
18 Major versions of the sg driver
20 There are three major versions of sg found in the linux kernel (lk):
21 - sg version 1 (original) from 1992 to early 1999 (lk 2.2.5) .
23 - sg version 2 from lk 2.2.6 in the 2.2 series. It is based on
25 - sg version 3 found in the lk 2.4 series (and the lk 2.5 series).
31 The most recent documentation of the sg driver is kept at the Linux
34 This describes the sg version 3 driver found in the lk 2.4 series.
[all …]
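
For orientation, the sg v3 interface that scsi-generic.txt documents is driven from user space through the SG_IO ioctl. A minimal sketch (device node and error handling assumed away) issuing a 6-byte INQUIRY:

    #include <fcntl.h>
    #include <string.h>
    #include <sys/ioctl.h>
    #include <scsi/sg.h>

    int sg_inquiry(void)
    {
        unsigned char cdb[6] = { 0x12, 0, 0, 0, 96, 0 }; /* INQUIRY */
        unsigned char resp[96], sense[32];
        struct sg_io_hdr hdr;
        int fd = open("/dev/sg0", O_RDWR); /* assumed device node */

        memset(&hdr, 0, sizeof(hdr));
        hdr.interface_id = 'S';            /* sg v3 interface */
        hdr.cmd_len = sizeof(cdb);
        hdr.cmdp = cdb;
        hdr.dxfer_direction = SG_DXFER_FROM_DEV;
        hdr.dxferp = resp;
        hdr.dxfer_len = sizeof(resp);
        hdr.sbp = sense;
        hdr.mx_sb_len = sizeof(sense);
        hdr.timeout = 5000;                /* milliseconds */
        return ioctl(fd, SG_IO, &hdr);     /* 0 on success */
    }
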
/linux-4.4.14/lib/
scatterlist.c 25 struct scatterlist *sg_next(struct scatterlist *sg) in sg_next() argument
28 BUG_ON(sg->sg_magic != SG_MAGIC); in sg_next()
30 if (sg_is_last(sg)) in sg_next()
33 sg++; in sg_next()
34 if (unlikely(sg_is_chain(sg))) in sg_next()
35 sg = sg_chain_ptr(sg); in sg_next()
37 return sg; in sg_next()
50 int sg_nents(struct scatterlist *sg) in sg_nents() argument
53 for (nents = 0; sg; sg = sg_next(sg)) in sg_nents()
73 int sg_nents_for_len(struct scatterlist *sg, u64 len) in sg_nents_for_len() argument
[all …]
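
sg_next() above hides the chain hop, so callers can treat a chained table as one logical list. A minimal sketch (helper name hypothetical) that totals byte length the same way sg_nents() counts entries:

    /* Hypothetical helper; kernel context, <linux/scatterlist.h> assumed. */
    static unsigned int sg_total_len(struct scatterlist *sgl)
    {
        struct scatterlist *sg;
        unsigned int total = 0;

        for (sg = sgl; sg; sg = sg_next(sg))
            total += sg->length;
        return total;
    }
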
swiotlb.c 882 struct scatterlist *sg; in swiotlb_map_sg_attrs() local
887 for_each_sg(sgl, sg, nelems, i) { in swiotlb_map_sg_attrs()
888 phys_addr_t paddr = sg_phys(sg); in swiotlb_map_sg_attrs()
892 !dma_capable(hwdev, dev_addr, sg->length)) { in swiotlb_map_sg_attrs()
893 phys_addr_t map = map_single(hwdev, sg_phys(sg), in swiotlb_map_sg_attrs()
894 sg->length, dir); in swiotlb_map_sg_attrs()
898 swiotlb_full(hwdev, sg->length, dir, 0); in swiotlb_map_sg_attrs()
904 sg->dma_address = phys_to_dma(hwdev, map); in swiotlb_map_sg_attrs()
906 sg->dma_address = dev_addr; in swiotlb_map_sg_attrs()
907 sg_dma_len(sg) = sg->length; in swiotlb_map_sg_attrs()
[all …]
sg_split.c 30 struct scatterlist *sg; in sg_calculate_split() local
37 for_each_sg(in, sg, nents, i) { in sg_calculate_split()
38 sglen = mapped ? sg_dma_len(sg) : sg->length; in sg_calculate_split()
46 curr->in_sg0 = sg; in sg_calculate_split()
59 curr->in_sg0 = sg; in sg_calculate_split()
/linux-4.4.14/crypto/
scatterwalk.c 33 void scatterwalk_start(struct scatter_walk *walk, struct scatterlist *sg) in scatterwalk_start() argument
35 walk->sg = sg; in scatterwalk_start()
37 BUG_ON(!sg->length); in scatterwalk_start()
39 walk->offset = sg->offset; in scatterwalk_start()
56 page = sg_page(walk->sg) + ((walk->offset - 1) >> PAGE_SHIFT); in scatterwalk_pagedone()
68 if (walk->offset >= walk->sg->offset + walk->sg->length) in scatterwalk_pagedone()
69 scatterwalk_start(walk, sg_next(walk->sg)); in scatterwalk_pagedone()
107 void scatterwalk_map_and_copy(void *buf, struct scatterlist *sg, in scatterwalk_map_and_copy() argument
116 sg = scatterwalk_ffwd(tmp, sg, start); in scatterwalk_map_and_copy()
118 if (sg_page(sg) == virt_to_page(buf) && in scatterwalk_map_and_copy()
[all …]
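
scatterwalk_map_and_copy() above is the usual way AEAD code moves a small region in or out of a request; the final argument selects direction (0 copies from the list into buf). A minimal sketch (names hypothetical) reading an authentication tag from the tail of a destination list:

    /* Hypothetical helper; kernel context, <crypto/scatterwalk.h> assumed. */
    static void read_tag(struct scatterlist *dst, unsigned int total_len,
                         u8 *tag, unsigned int tag_len)
    {
        scatterwalk_map_and_copy(tag, dst, total_len - tag_len, tag_len, 0);
    }
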
algif_skcipher.c 31 struct scatterlist sg[0]; member
79 struct scatterlist *sg; in skcipher_free_async_sgls() local
89 for_each_sg(sgl, sg, n, i) in skcipher_free_async_sgls()
90 put_page(sg_page(sg)); in skcipher_free_async_sgls()
125 struct scatterlist *sg = NULL; in skcipher_alloc_sgl() local
129 sg = sgl->sg; in skcipher_alloc_sgl()
131 if (!sg || sgl->cur >= MAX_SGL_ENTS) { in skcipher_alloc_sgl()
133 sizeof(sgl->sg[0]) * (MAX_SGL_ENTS + 1), in skcipher_alloc_sgl()
138 sg_init_table(sgl->sg, MAX_SGL_ENTS + 1); in skcipher_alloc_sgl()
141 if (sg) in skcipher_alloc_sgl()
[all …]
algif_aead.c 29 struct scatterlist sg[ALG_MAX_PAGES]; member
83 struct scatterlist *sg = sgl->sg; in aead_put_sgl() local
87 if (!sg_page(sg + i)) in aead_put_sgl()
90 put_page(sg_page(sg + i)); in aead_put_sgl()
91 sg_assign_page(sg + i, NULL); in aead_put_sgl()
93 sg_init_table(sg, ALG_MAX_PAGES); in aead_put_sgl()
217 struct scatterlist *sg = NULL; in aead_sendmsg() local
221 sg = sgl->sg + sgl->cur - 1; in aead_sendmsg()
223 PAGE_SIZE - sg->offset - sg->length); in aead_sendmsg()
224 err = memcpy_from_msg(page_address(sg_page(sg)) + in aead_sendmsg()
[all …]
tcrypt.c 95 struct scatterlist *sg, int blen, int secs) in test_cipher_jiffies() argument
104 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen); in test_cipher_jiffies()
106 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen); in test_cipher_jiffies()
118 struct scatterlist *sg, int blen) in test_cipher_cycles() argument
129 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen); in test_cipher_cycles()
131 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen); in test_cipher_cycles()
143 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen); in test_cipher_cycles()
145 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen); in test_cipher_cycles()
280 static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE], in sg_init_aead() argument
293 sg_init_table(sg, np + 1); in sg_init_aead()
[all …]
twofish_common.c 500 ctx->s[2][i] = mds[2][q1[(a) ^ sc] ^ sg]; \
508 ctx->s[2][i] = mds[2][q1[q0[(a) ^ sc] ^ sg] ^ sk]; \
516 ctx->s[2][i] = mds[2][q1[q0[q0[(a) ^ sc] ^ sg] ^ sk] ^ so]; \
593 u8 sa = 0, sb = 0, sc = 0, sd = 0, se = 0, sf = 0, sg = 0, sh = 0; in __twofish_setkey() local
618 CALC_S (se, sf, sg, sh, 8, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */ in __twofish_setkey()
619 CALC_S (se, sf, sg, sh, 9, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */ in __twofish_setkey()
620 CALC_S (se, sf, sg, sh, 10, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */ in __twofish_setkey()
621 CALC_S (se, sf, sg, sh, 11, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */ in __twofish_setkey()
622 CALC_S (se, sf, sg, sh, 12, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */ in __twofish_setkey()
623 CALC_S (se, sf, sg, sh, 13, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */ in __twofish_setkey()
[all …]
ahash.c 69 struct scatterlist *sg; in hash_walk_new_entry() local
71 sg = walk->sg; in hash_walk_new_entry()
72 walk->offset = sg->offset; in hash_walk_new_entry()
73 walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT); in hash_walk_new_entry()
75 walk->entrylen = sg->length; in hash_walk_new_entry()
125 walk->sg = sg_next(walk->sg); in crypto_hash_walk_done()
142 walk->sg = req->src; in crypto_hash_walk_first()
160 walk->sg = req->src; in crypto_ahash_walk_first()
172 struct scatterlist *sg, unsigned int len) in crypto_hash_walk_first_compat() argument
182 walk->sg = sg; in crypto_hash_walk_first_compat()
keywrap.c 114 struct scatterlist *sg, in crypto_kw_scatterlist_ff() argument
123 while (sg) { in crypto_kw_scatterlist_ff()
124 if (sg->length > skip) { in crypto_kw_scatterlist_ff()
125 scatterwalk_start(walk, sg); in crypto_kw_scatterlist_ff()
129 skip -= sg->length; in crypto_kw_scatterlist_ff()
131 sg = sg_next(sg); in crypto_kw_scatterlist_ff()
ccm.c 211 struct scatterlist *sg, unsigned int len) in get_data_to_compute() argument
217 scatterwalk_start(&walk, sg); in get_data_to_compute()
222 scatterwalk_start(&walk, sg_next(walk.sg)); in get_data_to_compute()
313 struct scatterlist *sg; in crypto_ccm_init_crypt() local
330 sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen); in crypto_ccm_init_crypt()
331 if (sg != pctx->src + 1) in crypto_ccm_init_crypt()
332 sg_chain(pctx->src, 2, sg); in crypto_ccm_init_crypt()
337 sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen); in crypto_ccm_init_crypt()
338 if (sg != pctx->dst + 1) in crypto_ccm_init_crypt()
339 sg_chain(pctx->dst, 2, sg); in crypto_ccm_init_crypt()
[all …]
gcm.c 73 struct scatterlist sg; member
88 struct scatterlist sg; member
124 struct scatterlist sg[1]; in crypto_gcm_setkey() member
144 sg_init_one(data->sg, &data->hash, sizeof(data->hash)); in crypto_gcm_setkey()
150 ablkcipher_request_set_crypt(&data->req, data->sg, data->sg, in crypto_gcm_setkey()
199 struct scatterlist *sg; in crypto_gcm_init_common() local
207 sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen); in crypto_gcm_init_common()
208 if (sg != pctx->src + 1) in crypto_gcm_init_common()
209 sg_chain(pctx->src, 2, sg); in crypto_gcm_init_common()
214 sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen); in crypto_gcm_init_common()
[all …]
shash.c 277 struct scatterlist *sg = req->src; in shash_ahash_digest() local
278 unsigned int offset = sg->offset; in shash_ahash_digest()
282 if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) { in shash_ahash_digest()
285 data = kmap_atomic(sg_page(sg)); in shash_ahash_digest()
390 static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg, in shash_compat_update() argument
398 for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len); in shash_compat_update()
412 static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg, in shash_compat_digest() argument
415 unsigned int offset = sg->offset; in shash_compat_digest()
418 if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) { in shash_compat_digest()
425 data = kmap_atomic(sg_page(sg)); in shash_compat_digest()
[all …]
/linux-4.4.14/arch/nios2/mm/
dma-mapping.c 59 int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg() argument
66 for_each_sg(sg, sg, nents, i) { in dma_map_sg()
69 addr = sg_virt(sg); in dma_map_sg()
71 __dma_sync_for_device(addr, sg->length, direction); in dma_map_sg()
72 sg->dma_address = sg_phys(sg); in dma_map_sg()
104 void dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries, in dma_unmap_sg() argument
115 for_each_sg(sg, sg, nhwentries, i) { in dma_unmap_sg()
116 addr = sg_virt(sg); in dma_unmap_sg()
118 __dma_sync_for_cpu(addr, sg->length, direction); in dma_unmap_sg()
161 void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nelems, in dma_sync_sg_for_cpu() argument
[all …]
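
The arch hooks above sit behind the generic DMA API; from a driver the usual streaming cycle looks like the sketch below (program_hw_segment() is a hypothetical stand-in for device-specific setup):

    /* Sketch: map, program the device per segment, later unmap.
     * Note dma_unmap_sg() takes the original nents, not the count
     * that dma_map_sg() returned.
     */
    int start_dma(struct device *dev, struct scatterlist *sgl, int nents)
    {
        struct scatterlist *sg;
        int i, count;

        count = dma_map_sg(dev, sgl, nents, DMA_TO_DEVICE);
        if (!count)
            return -ENOMEM;

        for_each_sg(sgl, sg, count, i)
            program_hw_segment(sg_dma_address(sg), sg_dma_len(sg));

        /* ... once the hardware signals completion ... */
        dma_unmap_sg(dev, sgl, nents, DMA_TO_DEVICE);
        return 0;
    }
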
/linux-4.4.14/net/mac80211/
aes_gcm.c 21 struct scatterlist sg[3]; in ieee80211_aes_gcm_encrypt() local
30 sg_init_table(sg, 3); in ieee80211_aes_gcm_encrypt()
31 sg_set_buf(&sg[0], &aad[2], be16_to_cpup((__be16 *)aad)); in ieee80211_aes_gcm_encrypt()
32 sg_set_buf(&sg[1], data, data_len); in ieee80211_aes_gcm_encrypt()
33 sg_set_buf(&sg[2], mic, IEEE80211_GCMP_MIC_LEN); in ieee80211_aes_gcm_encrypt()
36 aead_request_set_crypt(aead_req, sg, sg, data_len, j_0); in ieee80211_aes_gcm_encrypt()
37 aead_request_set_ad(aead_req, sg[0].length); in ieee80211_aes_gcm_encrypt()
45 struct scatterlist sg[3]; in ieee80211_aes_gcm_decrypt() local
56 sg_init_table(sg, 3); in ieee80211_aes_gcm_decrypt()
57 sg_set_buf(&sg[0], &aad[2], be16_to_cpup((__be16 *)aad)); in ieee80211_aes_gcm_decrypt()
[all …]
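
The three-entry layout above (AAD, payload, MIC, with one list serving as both source and destination) is the standard in-place AEAD setup. A condensed sketch of the same pattern (function and parameters hypothetical; lowmem buffers and a synchronous tfm assumed):

    /* Sketch: in-place AEAD encrypt over AAD | payload | MIC regions. */
    static int aead_encrypt_inplace(struct crypto_aead *tfm,
                                    u8 *aad, unsigned int aad_len,
                                    u8 *data, unsigned int data_len,
                                    u8 *mic, unsigned int mic_len, u8 *iv)
    {
        struct scatterlist sg[3];
        struct aead_request *req = aead_request_alloc(tfm, GFP_KERNEL);
        int err;

        if (!req)
            return -ENOMEM;
        sg_init_table(sg, 3);
        sg_set_buf(&sg[0], aad, aad_len);
        sg_set_buf(&sg[1], data, data_len);
        sg_set_buf(&sg[2], mic, mic_len);

        aead_request_set_crypt(req, sg, sg, data_len, iv);
        aead_request_set_ad(req, aad_len);
        err = crypto_aead_encrypt(req); /* the MIC lands in sg[2] */
        aead_request_free(req);
        return err;
    }
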
aes_ccm.c 25 struct scatterlist sg[3]; in ieee80211_aes_ccm_encrypt() local
34 sg_init_table(sg, 3); in ieee80211_aes_ccm_encrypt()
35 sg_set_buf(&sg[0], &aad[2], be16_to_cpup((__be16 *)aad)); in ieee80211_aes_ccm_encrypt()
36 sg_set_buf(&sg[1], data, data_len); in ieee80211_aes_ccm_encrypt()
37 sg_set_buf(&sg[2], mic, mic_len); in ieee80211_aes_ccm_encrypt()
40 aead_request_set_crypt(aead_req, sg, sg, data_len, b_0); in ieee80211_aes_ccm_encrypt()
41 aead_request_set_ad(aead_req, sg[0].length); in ieee80211_aes_ccm_encrypt()
50 struct scatterlist sg[3]; in ieee80211_aes_ccm_decrypt() local
61 sg_init_table(sg, 3); in ieee80211_aes_ccm_decrypt()
62 sg_set_buf(&sg[0], &aad[2], be16_to_cpup((__be16 *)aad)); in ieee80211_aes_ccm_decrypt()
[all …]
aes_gmac.c 27 struct scatterlist sg[4]; in ieee80211_aes_gmac() local
40 sg_init_table(sg, 4); in ieee80211_aes_gmac()
41 sg_set_buf(&sg[0], aad, AAD_LEN); in ieee80211_aes_gmac()
42 sg_set_buf(&sg[1], data, data_len - GMAC_MIC_LEN); in ieee80211_aes_gmac()
43 sg_set_buf(&sg[2], zero, GMAC_MIC_LEN); in ieee80211_aes_gmac()
44 sg_set_buf(&sg[3], mic, GMAC_MIC_LEN); in ieee80211_aes_gmac()
51 aead_request_set_crypt(aead_req, sg, sg, 0, iv); in ieee80211_aes_gmac()
/linux-4.4.14/samples/kfifo/
dma-example.c 28 struct scatterlist sg[10]; in example_init() local
64 sg_init_table(sg, ARRAY_SIZE(sg)); in example_init()
65 nents = kfifo_dma_in_prepare(&fifo, sg, ARRAY_SIZE(sg), FIFO_SIZE); in example_init()
79 i, sg[i].page_link, sg[i].offset, sg[i].length); in example_init()
81 if (sg_is_last(&sg[i])) in example_init()
95 nents = kfifo_dma_out_prepare(&fifo, sg, ARRAY_SIZE(sg), 8); in example_init()
108 i, sg[i].page_link, sg[i].offset, sg[i].length); in example_init()
110 if (sg_is_last(&sg[i])) in example_init()
/linux-4.4.14/include/crypto/
scatterwalk.h 29 struct scatterlist *sg, in scatterwalk_crypto_chain() argument
33 head->length += sg->length; in scatterwalk_crypto_chain()
34 sg = sg_next(sg); in scatterwalk_crypto_chain()
37 if (sg) in scatterwalk_crypto_chain()
38 sg_chain(head, num, sg); in scatterwalk_crypto_chain()
46 return !(((sg_page(walk_in->sg) - sg_page(walk_out->sg)) << PAGE_SHIFT) + in scatterwalk_samebuf()
52 unsigned int len = walk->sg->offset + walk->sg->length - walk->offset; in scatterwalk_pagelen()
78 return sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT); in scatterwalk_page()
86 void scatterwalk_start(struct scatter_walk *walk, struct scatterlist *sg);
92 void scatterwalk_map_and_copy(void *buf, struct scatterlist *sg,
[all …]
/linux-4.4.14/drivers/media/pci/tw68/
tw68-risc.c 46 struct scatterlist *sg; in tw68_risc_field() local
62 sg = sglist; in tw68_risc_field()
65 while (offset && offset >= sg_dma_len(sg)) { in tw68_risc_field()
66 offset -= sg_dma_len(sg); in tw68_risc_field()
67 sg = sg_next(sg); in tw68_risc_field()
69 if (bpl <= sg_dma_len(sg) - offset) { in tw68_risc_field()
73 *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset); in tw68_risc_field()
84 done = (sg_dma_len(sg) - offset); in tw68_risc_field()
88 *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset); in tw68_risc_field()
90 sg = sg_next(sg); in tw68_risc_field()
[all …]
/linux-4.4.14/arch/tile/kernel/
pci-dma.c 207 struct scatterlist *sg; in tile_dma_map_sg() local
214 for_each_sg(sglist, sg, nents, i) { in tile_dma_map_sg()
215 sg->dma_address = sg_phys(sg); in tile_dma_map_sg()
216 __dma_prep_pa_range(sg->dma_address, sg->length, direction); in tile_dma_map_sg()
218 sg->dma_length = sg->length; in tile_dma_map_sg()
229 struct scatterlist *sg; in tile_dma_unmap_sg() local
233 for_each_sg(sglist, sg, nents, i) { in tile_dma_unmap_sg()
234 sg->dma_address = sg_phys(sg); in tile_dma_unmap_sg()
235 __dma_complete_pa_range(sg->dma_address, sg->length, in tile_dma_unmap_sg()
284 struct scatterlist *sg; in tile_dma_sync_sg_for_cpu() local
[all …]
/linux-4.4.14/drivers/crypto/caam/
sg_sw_sec4.h 32 sg_to_sec4_sg(struct scatterlist *sg, int sg_count, in sg_to_sec4_sg() argument
36 dma_to_sec4_sg_one(sec4_sg_ptr, sg_dma_address(sg), in sg_to_sec4_sg()
37 sg_dma_len(sg), offset); in sg_to_sec4_sg()
39 sg = sg_next(sg); in sg_to_sec4_sg()
49 static inline void sg_to_sec4_sg_last(struct scatterlist *sg, int sg_count, in sg_to_sec4_sg_last() argument
53 sec4_sg_ptr = sg_to_sec4_sg(sg, sg_count, sec4_sg_ptr, offset); in sg_to_sec4_sg_last()
58 struct scatterlist *sg, unsigned int total, in sg_to_sec4_sg_len() argument
62 unsigned int len = min(sg_dma_len(sg), total); in sg_to_sec4_sg_len()
64 dma_to_sec4_sg_one(sec4_sg_ptr, sg_dma_address(sg), len, 0); in sg_to_sec4_sg_len()
66 sg = sg_next(sg); in sg_to_sec4_sg_len()
/linux-4.4.14/arch/metag/include/asm/
dma-mapping.h 51 struct scatterlist *sg; in dma_map_sg() local
57 for_each_sg(sglist, sg, nents, i) { in dma_map_sg()
58 BUG_ON(!sg_page(sg)); in dma_map_sg()
60 sg->dma_address = sg_phys(sg); in dma_map_sg()
61 dma_sync_for_device(sg_virt(sg), sg->length, direction); in dma_map_sg()
90 struct scatterlist *sg; in dma_unmap_sg() local
96 for_each_sg(sglist, sg, nhwentries, i) { in dma_unmap_sg()
97 BUG_ON(!sg_page(sg)); in dma_unmap_sg()
99 sg->dma_address = sg_phys(sg); in dma_unmap_sg()
100 dma_sync_for_cpu(sg_virt(sg), sg->length, direction); in dma_unmap_sg()
[all …]
/linux-4.4.14/arch/c6x/kernel/
dma.c 67 struct scatterlist *sg; in dma_map_sg() local
70 for_each_sg(sglist, sg, nents, i) in dma_map_sg()
71 sg->dma_address = dma_map_single(dev, sg_virt(sg), sg->length, in dma_map_sg()
84 struct scatterlist *sg; in dma_unmap_sg() local
87 for_each_sg(sglist, sg, nents, i) in dma_unmap_sg()
88 dma_unmap_single(dev, sg_dma_address(sg), sg->length, dir); in dma_unmap_sg()
117 struct scatterlist *sg; in dma_sync_sg_for_cpu() local
120 for_each_sg(sglist, sg, nents, i) in dma_sync_sg_for_cpu()
121 dma_sync_single_for_cpu(dev, sg_dma_address(sg), in dma_sync_sg_for_cpu()
122 sg->length, dir); in dma_sync_sg_for_cpu()
[all …]
/linux-4.4.14/drivers/scsi/aacraid/
commctrl.c 560 if (user_srbcmd->sg.count > ARRAY_SIZE(sg_list)) { in aac_send_raw_srb()
562 le32_to_cpu(srbcmd->sg.count))); in aac_send_raw_srb()
567 ((user_srbcmd->sg.count & 0xff) * sizeof(struct sgentry)); in aac_send_raw_srb()
568 actual_fibsize64 = actual_fibsize + (user_srbcmd->sg.count & 0xff) * in aac_send_raw_srb()
576 actual_fibsize, actual_fibsize64, user_srbcmd->sg.count, in aac_send_raw_srb()
582 if ((data_dir == DMA_NONE) && user_srbcmd->sg.count) { in aac_send_raw_srb()
589 struct user_sgmap64* upsg = (struct user_sgmap64*)&user_srbcmd->sg; in aac_send_raw_srb()
590 struct sgmap64* psg = (struct sgmap64*)&srbcmd->sg; in aac_send_raw_srb()
600 if (upsg->sg[i].count > in aac_send_raw_srb()
609 p = kmalloc(upsg->sg[i].count,GFP_KERNEL|__GFP_DMA); in aac_send_raw_srb()
[all …]
aachba.c 1183 ret = aac_build_sgraw(cmd, &readcmd->sg); in aac_read_raw_io()
1188 ((le32_to_cpu(readcmd->sg.count)-1) * sizeof(struct sgentryraw)); in aac_read_raw_io()
1219 ret = aac_build_sg64(cmd, &readcmd->sg); in aac_read_block64()
1223 ((le32_to_cpu(readcmd->sg.count) - 1) * in aac_read_block64()
1254 ret = aac_build_sg(cmd, &readcmd->sg); in aac_read_block()
1258 ((le32_to_cpu(readcmd->sg.count) - 1) * in aac_read_block()
1315 ret = aac_build_sgraw(cmd, &writecmd->sg); in aac_write_raw_io()
1320 ((le32_to_cpu(writecmd->sg.count)-1) * sizeof (struct sgentryraw)); in aac_write_raw_io()
1351 ret = aac_build_sg64(cmd, &writecmd->sg); in aac_write_block64()
1355 ((le32_to_cpu(writecmd->sg.count) - 1) * in aac_write_block64()
[all …]
/linux-4.4.14/drivers/crypto/qce/
dma.c 60 struct scatterlist *sg = sgt->sgl, *sg_last = NULL; in qce_sgtable_add() local
62 while (sg) { in qce_sgtable_add()
63 if (!sg_page(sg)) in qce_sgtable_add()
65 sg = sg_next(sg); in qce_sgtable_add()
68 if (!sg) in qce_sgtable_add()
71 while (new_sgl && sg) { in qce_sgtable_add()
72 sg_set_page(sg, sg_page(new_sgl), new_sgl->length, in qce_sgtable_add()
74 sg_last = sg; in qce_sgtable_add()
75 sg = sg_next(sg); in qce_sgtable_add()
82 static int qce_dma_prep_sg(struct dma_chan *chan, struct scatterlist *sg, in qce_dma_prep_sg() argument
[all …]
sha.c 235 struct scatterlist *sg_last, *sg; in qce_ahash_update() local
277 sg = sg_last = req->src; in qce_ahash_update()
279 while (len < nbytes && sg) { in qce_ahash_update()
280 if (len + sg_dma_len(sg) > nbytes) in qce_ahash_update()
282 len += sg_dma_len(sg); in qce_ahash_update()
283 sg_last = sg; in qce_ahash_update()
284 sg = sg_next(sg); in qce_ahash_update()
293 sg_init_table(rctx->sg, 2); in qce_ahash_update()
294 sg_set_buf(rctx->sg, rctx->tmpbuf, rctx->buflen); in qce_ahash_update()
295 sg_chain(rctx->sg, 2, req->src); in qce_ahash_update()
[all …]
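
The sg_chain() idiom above, which splices a driver-owned buffer in front of the caller's list, needs one spare slot to hold the chain link. A minimal sketch (names hypothetical):

    /* Sketch: head[] needs two slots, one for the header buffer and
     * one that sg_chain() converts into a link pointing at req_src.
     */
    static void prepend_header(struct scatterlist head[2], void *hdr,
                               unsigned int hdr_len,
                               struct scatterlist *req_src)
    {
        sg_init_table(head, 2);
        sg_set_buf(head, hdr, hdr_len);
        sg_chain(head, 2, req_src);
    }
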
ablkcipher.c 68 struct scatterlist *sg; in qce_ablkcipher_async_req_handle() local
98 sg = qce_sgtable_add(&rctx->dst_tbl, req->dst); in qce_ablkcipher_async_req_handle()
99 if (IS_ERR(sg)) { in qce_ablkcipher_async_req_handle()
100 ret = PTR_ERR(sg); in qce_ablkcipher_async_req_handle()
104 sg = qce_sgtable_add(&rctx->dst_tbl, &rctx->result_sg); in qce_ablkcipher_async_req_handle()
105 if (IS_ERR(sg)) { in qce_ablkcipher_async_req_handle()
106 ret = PTR_ERR(sg); in qce_ablkcipher_async_req_handle()
110 sg_mark_end(sg); in qce_ablkcipher_async_req_handle()
/linux-4.4.14/net/sunrpc/auth_gss/
gss_krb5_crypto.c 61 struct scatterlist sg[1]; in krb5_encrypt() local
78 sg_init_one(sg, out, length); in krb5_encrypt()
80 ret = crypto_blkcipher_encrypt_iv(&desc, sg, sg, length); in krb5_encrypt()
95 struct scatterlist sg[1]; in krb5_decrypt() local
111 sg_init_one(sg, out, length); in krb5_decrypt()
113 ret = crypto_blkcipher_decrypt_iv(&desc, sg, sg, length); in krb5_decrypt()
120 checksummer(struct scatterlist *sg, void *data) in checksummer() argument
124 return crypto_hash_update(desc, sg, sg->length); in checksummer()
156 struct scatterlist sg[1]; in make_checksum_hmac_md5() local
194 sg_init_one(sg, rc4salt, 4); in make_checksum_hmac_md5()
[all …]
/linux-4.4.14/arch/sparc/mm/
iommu.c 238 static void iommu_get_scsi_sgl_gflush(struct device *dev, struct scatterlist *sg, int sz) in iommu_get_scsi_sgl_gflush() argument
245 n = (sg->length + sg->offset + PAGE_SIZE-1) >> PAGE_SHIFT; in iommu_get_scsi_sgl_gflush()
246 sg->dma_address = iommu_get_one(dev, sg_page(sg), n) + sg->offset; in iommu_get_scsi_sgl_gflush()
247 sg->dma_length = sg->length; in iommu_get_scsi_sgl_gflush()
248 sg = sg_next(sg); in iommu_get_scsi_sgl_gflush()
252 static void iommu_get_scsi_sgl_pflush(struct device *dev, struct scatterlist *sg, int sz) in iommu_get_scsi_sgl_pflush() argument
260 n = (sg->length + sg->offset + PAGE_SIZE-1) >> PAGE_SHIFT; in iommu_get_scsi_sgl_pflush()
267 if ((page = (unsigned long) page_address(sg_page(sg))) != 0) { in iommu_get_scsi_sgl_pflush()
277 sg->dma_address = iommu_get_one(dev, sg_page(sg), n) + sg->offset; in iommu_get_scsi_sgl_pflush()
278 sg->dma_length = sg->length; in iommu_get_scsi_sgl_pflush()
[all …]
io-unit.c 153 static void iounit_get_scsi_sgl(struct device *dev, struct scatterlist *sg, int sz) in iounit_get_scsi_sgl() argument
162 sg->dma_address = iounit_get_area(iounit, (unsigned long) sg_virt(sg), sg->length); in iounit_get_scsi_sgl()
163 sg->dma_length = sg->length; in iounit_get_scsi_sgl()
164 sg = sg_next(sg); in iounit_get_scsi_sgl()
183 static void iounit_release_scsi_sgl(struct device *dev, struct scatterlist *sg, int sz) in iounit_release_scsi_sgl() argument
192 len = ((sg->dma_address & ~PAGE_MASK) + sg->length + (PAGE_SIZE-1)) >> PAGE_SHIFT; in iounit_release_scsi_sgl()
193 vaddr = (sg->dma_address - IOUNIT_DMA_BASE) >> PAGE_SHIFT; in iounit_release_scsi_sgl()
197 sg = sg_next(sg); in iounit_release_scsi_sgl()
/linux-4.4.14/net/rxrpc/
rxkad.c 117 struct scatterlist sg[2]; in rxkad_prime_packet_security() local
140 sg_init_one(&sg[0], &tmpbuf, sizeof(tmpbuf)); in rxkad_prime_packet_security()
141 sg_init_one(&sg[1], &tmpbuf, sizeof(tmpbuf)); in rxkad_prime_packet_security()
142 crypto_blkcipher_encrypt_iv(&desc, &sg[0], &sg[1], sizeof(tmpbuf)); in rxkad_prime_packet_security()
161 struct scatterlist sg[2]; in rxkad_secure_packet_auth() local
184 sg_init_one(&sg[0], &tmpbuf, sizeof(tmpbuf)); in rxkad_secure_packet_auth()
185 sg_init_one(&sg[1], &tmpbuf, sizeof(tmpbuf)); in rxkad_secure_packet_auth()
186 crypto_blkcipher_encrypt_iv(&desc, &sg[0], &sg[1], sizeof(tmpbuf)); in rxkad_secure_packet_auth()
208 struct scatterlist sg[16]; in rxkad_secure_packet_encrypt() local
230 sg_init_one(&sg[0], sechdr, sizeof(rxkhdr)); in rxkad_secure_packet_encrypt()
[all …]
/linux-4.4.14/arch/microblaze/kernel/
dma.c 58 struct scatterlist *sg; in dma_direct_map_sg() local
62 for_each_sg(sgl, sg, nents, i) { in dma_direct_map_sg()
63 sg->dma_address = sg_phys(sg); in dma_direct_map_sg()
64 __dma_sync(page_to_phys(sg_page(sg)) + sg->offset, in dma_direct_map_sg()
65 sg->length, direction); in dma_direct_map_sg()
134 struct scatterlist *sg; in dma_direct_sync_sg_for_cpu() local
139 for_each_sg(sgl, sg, nents, i) in dma_direct_sync_sg_for_cpu()
140 __dma_sync(sg->dma_address, sg->length, direction); in dma_direct_sync_sg_for_cpu()
148 struct scatterlist *sg; in dma_direct_sync_sg_for_device() local
153 for_each_sg(sgl, sg, nents, i) in dma_direct_sync_sg_for_device()
[all …]
/linux-4.4.14/arch/mips/mm/
dma-default.c 306 struct scatterlist *sg; in mips_dma_map_sg() local
308 for_each_sg(sglist, sg, nents, i) { in mips_dma_map_sg()
310 __dma_sync(sg_page(sg), sg->offset, sg->length, in mips_dma_map_sg()
313 sg->dma_length = sg->length; in mips_dma_map_sg()
315 sg->dma_address = plat_map_dma_mem_page(dev, sg_page(sg)) + in mips_dma_map_sg()
316 sg->offset; in mips_dma_map_sg()
337 struct scatterlist *sg; in mips_dma_unmap_sg() local
339 for_each_sg(sglist, sg, nhwentries, i) { in mips_dma_unmap_sg()
342 __dma_sync(sg_page(sg), sg->offset, sg->length, in mips_dma_unmap_sg()
344 plat_unmap_dma_mem(dev, sg->dma_address, sg->length, direction); in mips_dma_unmap_sg()
[all …]
/linux-4.4.14/arch/alpha/kernel/
pci_iommu.c 504 sg_classify(struct device *dev, struct scatterlist *sg, struct scatterlist *end, in sg_classify() argument
512 leader = sg; in sg_classify()
519 for (++sg; sg < end; ++sg) { in sg_classify()
521 addr = SG_ENT_PHYS_ADDRESS(sg); in sg_classify()
522 len = sg->length; in sg_classify()
528 sg->dma_address = -1; in sg_classify()
531 sg->dma_address = -2; in sg_classify()
538 leader = sg; in sg_classify()
560 struct scatterlist *sg; in sg_fill() local
617 sg = leader; in sg_fill()
[all …]
pci-noop.c 145 struct scatterlist *sg; in alpha_noop_map_sg() local
147 for_each_sg(sgl, sg, nents, i) { in alpha_noop_map_sg()
150 BUG_ON(!sg_page(sg)); in alpha_noop_map_sg()
151 va = sg_virt(sg); in alpha_noop_map_sg()
152 sg_dma_address(sg) = (dma_addr_t)virt_to_phys(va); in alpha_noop_map_sg()
153 sg_dma_len(sg) = sg->length; in alpha_noop_map_sg()
/linux-4.4.14/drivers/dma/
dma-axi-dmac.c 92 struct axi_dmac_sg sg[]; member
184 struct axi_dmac_sg *sg; in axi_dmac_start_transfer() local
201 sg = &desc->sg[desc->num_submitted]; in axi_dmac_start_transfer()
209 sg->id = axi_dmac_read(dmac, AXI_DMAC_REG_TRANSFER_ID); in axi_dmac_start_transfer()
212 axi_dmac_write(dmac, AXI_DMAC_REG_DEST_ADDRESS, sg->dest_addr); in axi_dmac_start_transfer()
213 axi_dmac_write(dmac, AXI_DMAC_REG_DEST_STRIDE, sg->dest_stride); in axi_dmac_start_transfer()
217 axi_dmac_write(dmac, AXI_DMAC_REG_SRC_ADDRESS, sg->src_addr); in axi_dmac_start_transfer()
218 axi_dmac_write(dmac, AXI_DMAC_REG_SRC_STRIDE, sg->src_stride); in axi_dmac_start_transfer()
228 axi_dmac_write(dmac, AXI_DMAC_REG_X_LENGTH, sg->x_len - 1); in axi_dmac_start_transfer()
229 axi_dmac_write(dmac, AXI_DMAC_REG_Y_LENGTH, sg->y_len - 1); in axi_dmac_start_transfer()
[all …]
dma-jz4740.c 112 struct jz4740_dma_sg sg[]; member
294 struct jz4740_dma_sg *sg; in jz4740_dma_start_transfer() local
310 sg = &chan->desc->sg[chan->next_sg]; in jz4740_dma_start_transfer()
313 src_addr = sg->addr; in jz4740_dma_start_transfer()
317 dst_addr = sg->addr; in jz4740_dma_start_transfer()
322 sg->len >> chan->transfer_shift); in jz4740_dma_start_transfer()
396 struct scatterlist *sg; in jz4740_dma_prep_slave_sg() local
403 for_each_sg(sgl, sg, sg_len, i) { in jz4740_dma_prep_slave_sg()
404 desc->sg[i].addr = sg_dma_address(sg); in jz4740_dma_prep_slave_sg()
405 desc->sg[i].len = sg_dma_len(sg); in jz4740_dma_prep_slave_sg()
[all …]
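
On the consumer side, a peripheral driver feeds a controller like this through the dmaengine slave API. A minimal sketch (channel, direction and an already dma-mapped list assumed):

    /* Sketch: hand a mapped scatterlist to a dmaengine slave channel. */
    static int submit_slave_sg(struct dma_chan *chan,
                               struct scatterlist *sgl, unsigned int nents)
    {
        struct dma_async_tx_descriptor *desc;

        desc = dmaengine_prep_slave_sg(chan, sgl, nents, DMA_MEM_TO_DEV,
                                       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc)
            return -ENOMEM;

        dmaengine_submit(desc);        /* queue the descriptor */
        dma_async_issue_pending(chan); /* kick the channel */
        return 0;
    }
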
sa11x0-dma.c 85 struct sa11x0_dma_sg sg[0]; member
166 struct sa11x0_dma_sg *sg; in sa11x0_dma_start_sg() local
202 sg = &txd->sg[p->sg_load++]; in sa11x0_dma_start_sg()
216 writel_relaxed(sg->addr, base + dbsx); in sa11x0_dma_start_sg()
217 writel_relaxed(sg->len, base + dbtx); in sa11x0_dma_start_sg()
222 'A' + (dbsx == DMA_DBSB), sg->addr, in sa11x0_dma_start_sg()
223 'A' + (dbtx == DMA_DBTB), sg->len); in sa11x0_dma_start_sg()
470 i, txd->sg[i].addr, txd->sg[i].len); in sa11x0_dma_tx_status()
471 if (addr >= txd->sg[i].addr && in sa11x0_dma_tx_status()
472 addr < txd->sg[i].addr + txd->sg[i].len) { in sa11x0_dma_tx_status()
[all …]
omap-dma.c 79 struct omap_sg sg[0]; member
366 struct omap_sg *sg = d->sg + idx; in omap_dma_start_sg() local
379 omap_dma_chan_write(c, cxsa, sg->addr); in omap_dma_start_sg()
382 omap_dma_chan_write(c, CEN, sg->en); in omap_dma_start_sg()
383 omap_dma_chan_write(c, CFN, sg->fn); in omap_dma_start_sg()
599 static size_t omap_dma_sg_size(struct omap_sg *sg) in omap_dma_sg_size() argument
601 return sg->en * sg->fn; in omap_dma_sg_size()
610 size += omap_dma_sg_size(&d->sg[i]); in omap_dma_desc_size()
621 size_t this_size = omap_dma_sg_size(&d->sg[i]) * es_size; in omap_dma_desc_size_pos()
625 else if (addr >= d->sg[i].addr && in omap_dma_desc_size_pos()
[all …]
timb_dma.c 154 struct scatterlist *sg, bool last) in td_fill_desc() argument
156 if (sg_dma_len(sg) > USHRT_MAX) { in td_fill_desc()
162 if (sg_dma_len(sg) % sizeof(u32)) { in td_fill_desc()
164 sg_dma_len(sg)); in td_fill_desc()
169 dma_desc, (unsigned long long)sg_dma_address(sg)); in td_fill_desc()
171 dma_desc[7] = (sg_dma_address(sg) >> 24) & 0xff; in td_fill_desc()
172 dma_desc[6] = (sg_dma_address(sg) >> 16) & 0xff; in td_fill_desc()
173 dma_desc[5] = (sg_dma_address(sg) >> 8) & 0xff; in td_fill_desc()
174 dma_desc[4] = (sg_dma_address(sg) >> 0) & 0xff; in td_fill_desc()
176 dma_desc[3] = (sg_dma_len(sg) >> 8) & 0xff; in td_fill_desc()
[all …]
coh901318_lli.c 239 struct scatterlist *sg; in coh901318_lli_fill_sg() local
258 for_each_sg(sgl, sg, nents, i) { in coh901318_lli_fill_sg()
259 if (sg_is_chain(sg)) { in coh901318_lli_fill_sg()
273 src = sg_dma_address(sg); in coh901318_lli_fill_sg()
276 dst = sg_dma_address(sg); in coh901318_lli_fill_sg()
278 bytes_to_transfer = sg_dma_len(sg); in coh901318_lli_fill_sg()
imx-dma.c 143 struct scatterlist *sg; member
286 struct scatterlist *sg = d->sg; in imxdma_sg_next() local
289 now = min(d->len, sg_dma_len(sg)); in imxdma_sg_next()
294 imx_dmav1_writel(imxdma, sg->dma_address, in imxdma_sg_next()
297 imx_dmav1_writel(imxdma, sg->dma_address, in imxdma_sg_next()
329 d->sg && imxdma_hw_chain(imxdmac)) { in imxdma_enable_hw()
330 d->sg = sg_next(d->sg); in imxdma_enable_hw()
331 if (d->sg) { in imxdma_enable_hw()
448 if (desc->sg) { in dma_irq_handle_channel()
450 desc->sg = sg_next(desc->sg); in dma_irq_handle_channel()
[all …]
moxart-dma.c 130 struct moxart_sg sg[0]; member
311 d = kzalloc(sizeof(*d) + sg_len * sizeof(d->sg[0]), GFP_ATOMIC); in moxart_prep_slave_sg()
320 d->sg[i].addr = sg_dma_address(sgent); in moxart_prep_slave_sg()
321 d->sg[i].len = sg_dma_len(sgent); in moxart_prep_slave_sg()
406 struct moxart_sg *sg = ch->desc->sg + idx; in moxart_dma_start_sg() local
409 moxart_dma_set_params(ch, sg->addr, d->dev_addr); in moxart_dma_start_sg()
411 moxart_dma_set_params(ch, d->dev_addr, sg->addr); in moxart_dma_start_sg()
413 moxart_set_transfer_params(ch, sg->len); in moxart_dma_start_sg()
456 size += d->sg[i].len; in moxart_dma_desc_size()
/linux-4.4.14/drivers/gpu/drm/msm/
msm_iommu.c 51 struct scatterlist *sg; in msm_iommu_map() local
59 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in msm_iommu_map()
60 u32 pa = sg_phys(sg) - sg->offset; in msm_iommu_map()
61 size_t bytes = sg->length + sg->offset; in msm_iommu_map()
77 for_each_sg(sgt->sgl, sg, i, j) { in msm_iommu_map()
78 size_t bytes = sg->length + sg->offset; in msm_iommu_map()
90 struct scatterlist *sg; in msm_iommu_unmap() local
94 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in msm_iommu_unmap()
95 size_t bytes = sg->length + sg->offset; in msm_iommu_unmap()
/linux-4.4.14/arch/blackfin/kernel/
dma-mapping.c 119 struct scatterlist *sg; in dma_map_sg() local
122 for_each_sg(sg_list, sg, nents, i) { in dma_map_sg()
123 sg->dma_address = (dma_addr_t) sg_virt(sg); in dma_map_sg()
124 __dma_sync(sg_dma_address(sg), sg_dma_len(sg), direction); in dma_map_sg()
134 struct scatterlist *sg; in dma_sync_sg_for_device() local
137 for_each_sg(sg_list, sg, nelems, i) { in dma_sync_sg_for_device()
138 sg->dma_address = (dma_addr_t) sg_virt(sg); in dma_sync_sg_for_device()
139 __dma_sync(sg_dma_address(sg), sg_dma_len(sg), direction); in dma_sync_sg_for_device()
/linux-4.4.14/drivers/gpu/drm/omapdrm/
omap_gem_dmabuf.c 29 struct sg_table *sg; in omap_gem_map_dma_buf() local
33 sg = kzalloc(sizeof(*sg), GFP_KERNEL); in omap_gem_map_dma_buf()
34 if (!sg) in omap_gem_map_dma_buf()
44 ret = sg_alloc_table(sg, 1, GFP_KERNEL); in omap_gem_map_dma_buf()
48 sg_init_table(sg->sgl, 1); in omap_gem_map_dma_buf()
49 sg_dma_len(sg->sgl) = obj->size; in omap_gem_map_dma_buf()
50 sg_set_page(sg->sgl, pfn_to_page(PFN_DOWN(paddr)), obj->size, 0); in omap_gem_map_dma_buf()
51 sg_dma_address(sg->sgl) = paddr; in omap_gem_map_dma_buf()
56 return sg; in omap_gem_map_dma_buf()
58 kfree(sg); in omap_gem_map_dma_buf()
[all …]
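
Building a one-entry table for a physically contiguous buffer, as above, fits in a few lines. A minimal sketch (paddr and size assumed to describe a valid, page-aligned region):

    /* Sketch: describe one contiguous region with a 1-entry sg_table. */
    static struct sg_table *contig_sgt(phys_addr_t paddr, size_t size)
    {
        struct sg_table *sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);

        if (!sgt || sg_alloc_table(sgt, 1, GFP_KERNEL)) {
            kfree(sgt);
            return NULL;
        }
        sg_set_page(sgt->sgl, pfn_to_page(PFN_DOWN(paddr)), size, 0);
        return sgt;
    }
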
/linux-4.4.14/drivers/target/tcm_fc/
tfc_io.c 60 struct scatterlist *sg = NULL; in ft_queue_data_in() local
94 sg = se_cmd->t_data_sg; in ft_queue_data_in()
95 mem_len = sg->length; in ft_queue_data_in()
96 mem_off = sg->offset; in ft_queue_data_in()
97 page = sg_page(sg); in ft_queue_data_in()
112 sg = sg_next(sg); in ft_queue_data_in()
113 mem_len = min((size_t)sg->length, remaining); in ft_queue_data_in()
114 mem_off = sg->offset; in ft_queue_data_in()
115 page = sg_page(sg); in ft_queue_data_in()
216 struct scatterlist *sg = NULL; in ft_recv_write_data() local
[all …]
/linux-4.4.14/drivers/mmc/card/
queue.c 145 struct scatterlist *sg; in mmc_alloc_sg() local
147 sg = kmalloc(sizeof(struct scatterlist)*sg_len, GFP_KERNEL); in mmc_alloc_sg()
148 if (!sg) in mmc_alloc_sg()
152 sg_init_table(sg, sg_len); in mmc_alloc_sg()
155 return sg; in mmc_alloc_sg()
251 mqrq_cur->sg = mmc_alloc_sg(1, &ret); in mmc_init_queue()
260 mqrq_prev->sg = mmc_alloc_sg(1, &ret); in mmc_init_queue()
279 mqrq_cur->sg = mmc_alloc_sg(host->max_segs, &ret); in mmc_init_queue()
284 mqrq_prev->sg = mmc_alloc_sg(host->max_segs, &ret); in mmc_init_queue()
307 kfree(mqrq_cur->sg); in mmc_init_queue()
[all …]
mmc_test.c 84 struct scatterlist *sg; member
191 struct mmc_request *mrq, struct scatterlist *sg, unsigned sg_len, in mmc_test_prepare_mrq() argument
221 mrq->data->sg = sg; in mmc_test_prepare_mrq()
276 struct scatterlist sg; in mmc_test_buffer_transfer() local
282 sg_init_one(&sg, buffer, blksz); in mmc_test_buffer_transfer()
284 mmc_test_prepare_mrq(test, &mrq, &sg, 1, addr, 1, blksz, write); in mmc_test_buffer_transfer()
393 struct scatterlist *sg = NULL; in mmc_test_map_sg() local
412 if (sg) in mmc_test_map_sg()
413 sg = sg_next(sg); in mmc_test_map_sg()
415 sg = sglist; in mmc_test_map_sg()
[all …]
/linux-4.4.14/drivers/staging/android/ion/
ion_chunk_heap.c 44 struct scatterlist *sg; in ion_chunk_heap_allocate() local
67 sg = table->sgl; in ion_chunk_heap_allocate()
73 sg_set_page(sg, pfn_to_page(PFN_DOWN(paddr)), in ion_chunk_heap_allocate()
75 sg = sg_next(sg); in ion_chunk_heap_allocate()
82 sg = table->sgl; in ion_chunk_heap_allocate()
84 gen_pool_free(chunk_heap->pool, page_to_phys(sg_page(sg)), in ion_chunk_heap_allocate()
85 sg->length); in ion_chunk_heap_allocate()
86 sg = sg_next(sg); in ion_chunk_heap_allocate()
99 struct scatterlist *sg; in ion_chunk_heap_free() local
111 for_each_sg(table->sgl, sg, table->nents, i) { in ion_chunk_heap_free()
[all …]
ion_heap.c 31 struct scatterlist *sg; in ion_heap_map_kernel() local
48 for_each_sg(table->sgl, sg, table->nents, i) { in ion_heap_map_kernel()
49 int npages_this_entry = PAGE_ALIGN(sg->length) / PAGE_SIZE; in ion_heap_map_kernel()
50 struct page *page = sg_page(sg); in ion_heap_map_kernel()
77 struct scatterlist *sg; in ion_heap_map_user() local
81 for_each_sg(table->sgl, sg, table->nents, i) { in ion_heap_map_user()
82 struct page *page = sg_page(sg); in ion_heap_map_user()
84 unsigned long len = sg->length; in ion_heap_map_user()
86 if (offset >= sg->length) { in ion_heap_map_user()
87 offset -= sg->length; in ion_heap_map_user()
[all …]
ion_system_heap.c 129 struct scatterlist *sg; in ion_system_heap_allocate() local
160 sg = table->sgl; in ion_system_heap_allocate()
162 sg_set_page(sg, page, PAGE_SIZE << compound_order(page), 0); in ion_system_heap_allocate()
163 sg = sg_next(sg); in ion_system_heap_allocate()
185 struct scatterlist *sg; in ion_system_heap_free() local
196 for_each_sg(table->sgl, sg, table->nents, i) in ion_system_heap_free()
197 free_buffer_page(sys_heap, buffer, sg_page(sg)); in ion_system_heap_free()
/linux-4.4.14/drivers/usb/storage/
protocol.c 139 struct scatterlist *sg = *sgptr; in usb_stor_access_xfer_buf() local
143 if (sg) in usb_stor_access_xfer_buf()
144 nents = sg_nents(sg); in usb_stor_access_xfer_buf()
146 sg = scsi_sglist(srb); in usb_stor_access_xfer_buf()
148 sg_miter_start(&miter, sg, nents, dir == FROM_XFER_BUF ? in usb_stor_access_xfer_buf()
163 if (*offset + len < miter.piter.sg->length) { in usb_stor_access_xfer_buf()
165 *sgptr = miter.piter.sg; in usb_stor_access_xfer_buf()
168 *sgptr = sg_next(miter.piter.sg); in usb_stor_access_xfer_buf()
185 struct scatterlist *sg = NULL; in usb_stor_set_xfer_buf() local
188 buflen = usb_stor_access_xfer_buf(buffer, buflen, srb, &sg, &offset, in usb_stor_set_xfer_buf()
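
The sg_miter interface used above kmaps one piece at a time, so it also copes with highmem pages. A minimal sketch (helper name hypothetical) copying data out of an arbitrary list:

    /* Sketch: linearize scatterlist contents via the mapping iterator. */
    static size_t sg_copy_out(struct scatterlist *sgl, int nents,
                              void *buf, size_t buflen)
    {
        struct sg_mapping_iter miter;
        size_t done = 0;

        sg_miter_start(&miter, sgl, nents, SG_MITER_FROM_SG);
        while (done < buflen && sg_miter_next(&miter)) {
            size_t len = min(miter.length, buflen - done);

            memcpy((u8 *)buf + done, miter.addr, len);
            done += len;
        }
        sg_miter_stop(&miter);
        return done;
    }
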
/linux-4.4.14/drivers/s390/scsi/
zfcp_qdio.h 179 int zfcp_qdio_sg_one_sbale(struct scatterlist *sg) in zfcp_qdio_sg_one_sbale() argument
181 return sg_is_last(sg) && sg->length <= ZFCP_QDIO_SBALE_LEN; in zfcp_qdio_sg_one_sbale()
232 unsigned int zfcp_qdio_sbale_count(struct scatterlist *sg) in zfcp_qdio_sbale_count() argument
236 for (; sg; sg = sg_next(sg)) in zfcp_qdio_sbale_count()
247 unsigned int zfcp_qdio_real_bytes(struct scatterlist *sg) in zfcp_qdio_real_bytes() argument
251 for (; sg; sg = sg_next(sg)) in zfcp_qdio_real_bytes()
252 real_bytes += sg->length; in zfcp_qdio_real_bytes()
zfcp_aux.c 547 void zfcp_sg_free_table(struct scatterlist *sg, int count) in zfcp_sg_free_table() argument
551 for (i = 0; i < count; i++, sg++) in zfcp_sg_free_table()
552 if (sg) in zfcp_sg_free_table()
553 free_page((unsigned long) sg_virt(sg)); in zfcp_sg_free_table()
566 int zfcp_sg_setup_table(struct scatterlist *sg, int count) in zfcp_sg_setup_table() argument
571 sg_init_table(sg, count); in zfcp_sg_setup_table()
572 for (i = 0; i < count; i++, sg++) { in zfcp_sg_setup_table()
575 zfcp_sg_free_table(sg, i); in zfcp_sg_setup_table()
578 sg_set_buf(sg, addr, PAGE_SIZE); in zfcp_sg_setup_table()
/linux-4.4.14/arch/avr32/include/asm/
dma-mapping.h 216 struct scatterlist *sg; in dma_map_sg() local
218 for_each_sg(sglist, sg, nents, i) { in dma_map_sg()
221 sg->dma_address = page_to_bus(sg_page(sg)) + sg->offset; in dma_map_sg()
222 virt = sg_virt(sg); in dma_map_sg()
223 dma_cache_sync(dev, virt, sg->length, direction); in dma_map_sg()
241 dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries, in dma_unmap_sg() argument
314 dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, in dma_sync_sg_for_cpu() argument
329 struct scatterlist *sg; in dma_sync_sg_for_device() local
331 for_each_sg(sglist, sg, nents, i) in dma_sync_sg_for_device()
332 dma_cache_sync(dev, sg_virt(sg), sg->length, direction); in dma_sync_sg_for_device()
/linux-4.4.14/drivers/gpu/drm/udl/
udl_dmabuf.c 101 obj->sg = drm_prime_pages_to_sg(obj->pages, page_count); in udl_map_dma_buf()
102 if (IS_ERR(obj->sg)) { in udl_map_dma_buf()
104 return ERR_CAST(obj->sg); in udl_map_dma_buf()
109 ret = sg_alloc_table(sgt, obj->sg->orig_nents, GFP_KERNEL); in udl_map_dma_buf()
117 rd = obj->sg->sgl; in udl_map_dma_buf()
217 struct sg_table *sg, in udl_prime_create() argument
230 obj->sg = sg; in udl_prime_create()
237 drm_prime_sg_to_page_addr_arrays(sg, obj->pages, NULL, npages); in udl_prime_create()
247 struct sg_table *sg; in udl_gem_prime_import() local
261 sg = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL); in udl_gem_prime_import()
[all …]
/linux-4.4.14/arch/ia64/sn/pci/
pci_dma.c 21 #define SG_ENT_VIRT_ADDRESS(sg) (sg_virt((sg))) argument
248 struct scatterlist *sg; in sn_dma_unmap_sg() local
252 for_each_sg(sgl, sg, nhwentries, i) { in sn_dma_unmap_sg()
253 provider->dma_unmap(pdev, sg->dma_address, dir); in sn_dma_unmap_sg()
254 sg->dma_address = (dma_addr_t) NULL; in sn_dma_unmap_sg()
255 sg->dma_length = 0; in sn_dma_unmap_sg()
279 struct scatterlist *saved_sg = sgl, *sg; in sn_dma_map_sg() local
292 for_each_sg(sgl, sg, nhwentries, i) { in sn_dma_map_sg()
294 phys_addr = SG_ENT_PHYS_ADDRESS(sg); in sn_dma_map_sg()
298 sg->length, in sn_dma_map_sg()
[all …]
/linux-4.4.14/drivers/xen/
swiotlb-xen.c 543 struct scatterlist *sg; in xen_swiotlb_map_sg_attrs() local
548 for_each_sg(sgl, sg, nelems, i) { in xen_swiotlb_map_sg_attrs()
549 phys_addr_t paddr = sg_phys(sg); in xen_swiotlb_map_sg_attrs()
554 !dma_capable(hwdev, dev_addr, sg->length) || in xen_swiotlb_map_sg_attrs()
555 range_straddles_page_boundary(paddr, sg->length)) { in xen_swiotlb_map_sg_attrs()
558 sg_phys(sg), in xen_swiotlb_map_sg_attrs()
559 sg->length, in xen_swiotlb_map_sg_attrs()
573 sg->length, in xen_swiotlb_map_sg_attrs()
576 sg->dma_address = xen_phys_to_bus(map); in xen_swiotlb_map_sg_attrs()
584 sg->length, in xen_swiotlb_map_sg_attrs()
[all …]
/linux-4.4.14/net/rds/
tcp_send.c 81 unsigned int hdr_off, unsigned int sg, unsigned int off) in rds_tcp_xmit() argument
121 while (sg < rm->data.op_nents) { in rds_tcp_xmit()
125 sg_page(&rm->data.op_sg[sg]), in rds_tcp_xmit()
126 rm->data.op_sg[sg].offset + off, in rds_tcp_xmit()
127 rm->data.op_sg[sg].length - off, in rds_tcp_xmit()
129 rdsdebug("tcp sendpage %p:%u:%u ret %d\n", (void *)sg_page(&rm->data.op_sg[sg]), in rds_tcp_xmit()
130 rm->data.op_sg[sg].offset + off, rm->data.op_sg[sg].length - off, in rds_tcp_xmit()
137 if (off == rm->data.op_sg[sg].length) { in rds_tcp_xmit()
139 sg++; in rds_tcp_xmit()
141 if (sg == rm->data.op_nents - 1) in rds_tcp_xmit()
message.c 271 struct scatterlist *sg; in rds_message_copy_from_user() local
279 sg = rm->data.op_sg; in rds_message_copy_from_user()
283 if (!sg_page(sg)) { in rds_message_copy_from_user()
284 ret = rds_page_remainder_alloc(sg, iov_iter_count(from), in rds_message_copy_from_user()
293 sg->length - sg_off); in rds_message_copy_from_user()
296 nbytes = copy_page_from_iter(sg_page(sg), sg->offset + sg_off, in rds_message_copy_from_user()
303 if (sg_off == sg->length) in rds_message_copy_from_user()
304 sg++; in rds_message_copy_from_user()
313 struct scatterlist *sg; in rds_message_inc_copy_to_user() local
323 sg = rm->data.op_sg; in rds_message_inc_copy_to_user()
[all …]
iw_rdma.c 82 struct scatterlist *sg, unsigned int nents);
250 static void rds_iw_set_scatterlist(struct rds_iw_scatterlist *sg, argument
253 sg->list = list;
254 sg->len = sg_len;
255 sg->dma_len = 0;
256 sg->dma_npages = 0;
257 sg->bytes = 0;
261 struct rds_iw_scatterlist *sg) argument
266 WARN_ON(sg->dma_len);
268 sg->dma_len = ib_dma_map_sg(dev, sg->list, sg->len, DMA_BIDIRECTIONAL);
[all …]
ib.h 284 struct scatterlist *sg; in rds_ib_dma_sync_sg_for_cpu() local
287 for_each_sg(sglist, sg, sg_dma_len, i) { in rds_ib_dma_sync_sg_for_cpu()
289 ib_sg_dma_address(dev, sg), in rds_ib_dma_sync_sg_for_cpu()
290 ib_sg_dma_len(dev, sg), in rds_ib_dma_sync_sg_for_cpu()
301 struct scatterlist *sg; in rds_ib_dma_sync_sg_for_device() local
304 for_each_sg(sglist, sg, sg_dma_len, i) { in rds_ib_dma_sync_sg_for_device()
306 ib_sg_dma_address(dev, sg), in rds_ib_dma_sync_sg_for_device()
307 ib_sg_dma_len(dev, sg), in rds_ib_dma_sync_sg_for_device()
355 void *rds_ib_get_mr(struct scatterlist *sg, unsigned long nents,
398 unsigned int hdr_off, unsigned int sg, unsigned int off);
rdma.c 179 struct scatterlist *sg; in __rds_rdma_map() local
246 sg = kcalloc(nents, sizeof(*sg), GFP_KERNEL); in __rds_rdma_map()
247 if (!sg) { in __rds_rdma_map()
252 sg_init_table(sg, nents); in __rds_rdma_map()
256 sg_set_page(&sg[i], pages[i], PAGE_SIZE, 0); in __rds_rdma_map()
264 trans_private = rs->rs_transport->get_mr(sg, nents, rs, in __rds_rdma_map()
269 put_page(sg_page(&sg[i])); in __rds_rdma_map()
270 kfree(sg); in __rds_rdma_map()
672 struct scatterlist *sg; in rds_cmsg_rdma_args() local
674 sg = &op->op_sg[op->op_nents + j]; in rds_cmsg_rdma_args()
[all …]
iw.h 241 struct scatterlist *sg, unsigned int sg_dma_len, int direction) in rds_iw_dma_sync_sg_for_cpu() argument
247 ib_sg_dma_address(dev, &sg[i]), in rds_iw_dma_sync_sg_for_cpu()
248 ib_sg_dma_len(dev, &sg[i]), in rds_iw_dma_sync_sg_for_cpu()
255 struct scatterlist *sg, unsigned int sg_dma_len, int direction) in rds_iw_dma_sync_sg_for_device() argument
261 ib_sg_dma_address(dev, &sg[i]), in rds_iw_dma_sync_sg_for_device()
262 ib_sg_dma_len(dev, &sg[i]), in rds_iw_dma_sync_sg_for_device()
318 void *rds_iw_get_mr(struct scatterlist *sg, unsigned long nents,
356 unsigned int hdr_off, unsigned int sg, unsigned int off);
ib_rdma.c 58 struct scatterlist *sg; member
418 struct scatterlist *sg, unsigned int nents) in rds_ib_map_fmr() argument
421 struct scatterlist *scat = sg; in rds_ib_map_fmr()
429 sg_dma_len = ib_dma_map_sg(dev, sg, nents, in rds_ib_map_fmr()
487 ibmr->sg = scat; in rds_ib_map_fmr()
511 ib_dma_sync_sg_for_cpu(rds_ibdev->dev, ibmr->sg, in rds_ib_sync_mr()
515 ib_dma_sync_sg_for_device(rds_ibdev->dev, ibmr->sg, in rds_ib_sync_mr()
527 ibmr->sg, ibmr->sg_len, in __rds_ib_teardown_mr()
537 struct page *page = sg_page(&ibmr->sg[i]); in __rds_ib_teardown_mr()
545 kfree(ibmr->sg); in __rds_ib_teardown_mr()
[all …]
/linux-4.4.14/arch/sparc/kernel/
ioport.c 379 static int sbus_map_sg(struct device *dev, struct scatterlist *sg, int n, in sbus_map_sg() argument
382 mmu_get_scsi_sgl(dev, sg, n); in sbus_map_sg()
386 static void sbus_unmap_sg(struct device *dev, struct scatterlist *sg, int n, in sbus_unmap_sg() argument
389 mmu_release_scsi_sgl(dev, sg, n); in sbus_unmap_sg()
392 static void sbus_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, in sbus_sync_sg_for_cpu() argument
398 static void sbus_sync_sg_for_device(struct device *dev, struct scatterlist *sg, in sbus_sync_sg_for_device() argument
553 struct scatterlist *sg; in pci32_map_sg() local
557 for_each_sg(sgl, sg, nents, n) { in pci32_map_sg()
558 sg->dma_address = sg_phys(sg); in pci32_map_sg()
559 sg->dma_length = sg->length; in pci32_map_sg()
[all …]
iommu_common.h 42 struct scatterlist *sg) in is_span_boundary() argument
45 int nr = iommu_num_pages(paddr, outs->dma_length + sg->length, in is_span_boundary()
iommu.c 590 static unsigned long fetch_sg_ctx(struct iommu *iommu, struct scatterlist *sg) in fetch_sg_ctx() argument
599 bus_addr = sg->dma_address & IO_PAGE_MASK; in fetch_sg_ctx()
613 struct scatterlist *sg; in dma_4u_unmap_sg() local
626 sg = sglist; in dma_4u_unmap_sg()
628 dma_addr_t dma_handle = sg->dma_address; in dma_4u_unmap_sg()
629 unsigned int len = sg->dma_length; in dma_4u_unmap_sg()
652 sg = sg_next(sg); in dma_4u_unmap_sg()
705 struct scatterlist *sg, *sgprv; in dma_4u_sync_sg_for_cpu() local
731 for_each_sg(sglist, sg, nelems, i) { in dma_4u_sync_sg_for_cpu()
732 if (sg->dma_length == 0) in dma_4u_sync_sg_for_cpu()
[all …]
/linux-4.4.14/drivers/mmc/host/
tmio_mmc_dma.c 49 struct scatterlist *sg = host->sg_ptr, *sg_tmp; in tmio_mmc_start_dma_rx() local
57 for_each_sg(sg, sg_tmp, host->sg_len, i) { in tmio_mmc_start_dma_rx()
66 if ((!aligned && (host->sg_len > 1 || sg->length > PAGE_CACHE_SIZE || in tmio_mmc_start_dma_rx()
72 if (sg->length < TMIO_MMC_MIN_DMA_LEN) { in tmio_mmc_start_dma_rx()
81 sg_init_one(&host->bounce_sg, host->bounce_buf, sg->length); in tmio_mmc_start_dma_rx()
83 sg = host->sg_ptr; in tmio_mmc_start_dma_rx()
86 ret = dma_map_sg(chan->device->dev, sg, host->sg_len, DMA_FROM_DEVICE); in tmio_mmc_start_dma_rx()
88 desc = dmaengine_prep_slave_sg(chan, sg, ret, in tmio_mmc_start_dma_rx()
125 struct scatterlist *sg = host->sg_ptr, *sg_tmp; in tmio_mmc_start_dma_tx() local
133 for_each_sg(sg, sg_tmp, host->sg_len, i) { in tmio_mmc_start_dma_tx()
[all …]
atmel-mci.c 184 struct scatterlist *sg; member
758 buf_size = sg_dma_len(host->sg); in atmci_pdc_set_single_buf()
759 atmci_writel(host, pointer_reg, sg_dma_address(host->sg)); in atmci_pdc_set_single_buf()
774 atmci_writel(host, counter_reg, sg_dma_len(host->sg) / 4); in atmci_pdc_set_single_buf()
775 host->data_size -= sg_dma_len(host->sg); in atmci_pdc_set_single_buf()
777 host->sg = sg_next(host->sg); in atmci_pdc_set_single_buf()
802 data->sg, data->sg_len, in atmci_pdc_cleanup()
824 sg_copy_from_buffer(host->data->sg, host->data->sg_len, in atmci_pdc_complete()
841 data->sg, data->sg_len, in atmci_dma_cleanup()
906 host->sg = data->sg; in atmci_prepare_data()
[all …]
usdhi6rol0.c 180 struct scatterlist *sg; /* current SG segment */ member
311 struct scatterlist *sg) in usdhi6_blk_bounce() argument
318 data->blksz, data->blocks, sg->offset); in usdhi6_blk_bounce()
345 host->sg = data->sg; in usdhi6_sg_prep()
347 host->offset = host->sg->offset; in usdhi6_sg_prep()
354 struct scatterlist *sg = data->sg_len > 1 ? host->sg : data->sg; in usdhi6_sg_map() local
355 size_t head = PAGE_SIZE - sg->offset; in usdhi6_sg_map()
359 if (WARN(sg_dma_len(sg) % data->blksz, in usdhi6_sg_map()
361 sg_dma_len(sg), data->blksz)) in usdhi6_sg_map()
364 host->pg.page = sg_page(sg); in usdhi6_sg_map()
[all …]
mxcmmc.c 291 struct scatterlist *sg; in mxcmci_swap_buffers() local
294 for_each_sg(data->sg, sg, data->sg_len, i) in mxcmci_swap_buffers()
295 buffer_swap32(sg_virt(sg), sg->length); in mxcmci_swap_buffers()
306 struct scatterlist *sg; in mxcmci_setup_data() local
323 for_each_sg(data->sg, sg, data->sg_len, i) { in mxcmci_setup_data()
324 if (sg->offset & 3 || sg->length & 3 || sg->length < 512) { in mxcmci_setup_data()
340 nents = dma_map_sg(host->dma->device->dev, data->sg, in mxcmci_setup_data()
346 data->sg, data->sg_len, slave_dirn, in mxcmci_setup_data()
350 dma_unmap_sg(host->dma->device->dev, data->sg, data->sg_len, in mxcmci_setup_data()
460 dma_unmap_sg(host->dma->device->dev, data->sg, data->sg_len, in mxcmci_finish_data()
[all …]
tifm_sd.c 170 struct scatterlist *sg = r_data->sg; in tifm_sd_transfer_data() local
178 cnt = sg[host->sg_pos].length - host->block_pos; in tifm_sd_transfer_data()
191 cnt = sg[host->sg_pos].length; in tifm_sd_transfer_data()
193 off = sg[host->sg_pos].offset + host->block_pos; in tifm_sd_transfer_data()
195 pg = nth_page(sg_page(&sg[host->sg_pos]), off >> PAGE_SHIFT); in tifm_sd_transfer_data()
226 struct scatterlist *sg = r_data->sg; in tifm_sd_bounce_block() local
234 cnt = sg[host->sg_pos].length - host->block_pos; in tifm_sd_bounce_block()
240 cnt = sg[host->sg_pos].length; in tifm_sd_bounce_block()
242 off = sg[host->sg_pos].offset + host->block_pos; in tifm_sd_bounce_block()
244 pg = nth_page(sg_page(&sg[host->sg_pos]), off >> PAGE_SHIFT); in tifm_sd_bounce_block()
[all …]
tmio_mmc.h 118 static inline char *tmio_mmc_kmap_atomic(struct scatterlist *sg, in tmio_mmc_kmap_atomic() argument
122 return kmap_atomic(sg_page(sg)) + sg->offset; in tmio_mmc_kmap_atomic()
125 static inline void tmio_mmc_kunmap_atomic(struct scatterlist *sg, in tmio_mmc_kunmap_atomic() argument
128 kunmap_atomic(virt - sg->offset); in tmio_mmc_kunmap_atomic()
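
PIO paths map one segment at a time exactly as the pair above does. A minimal sketch (single-page segment assumed, matching the helper's own assumption):

    /* Sketch: touch a segment's bytes through a temporary mapping;
     * kunmap_atomic() wants the page-base address back, as above.
     */
    static void fill_segment(struct scatterlist *sg, u8 pattern)
    {
        u8 *p = (u8 *)kmap_atomic(sg_page(sg)) + sg->offset;

        memset(p, pattern, sg->length);
        kunmap_atomic(p - sg->offset);
    }
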
au1xmmc.c 353 dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len, host->dma.dir); in au1xmmc_data_complete()
399 struct scatterlist *sg; in au1xmmc_send_pio() local
407 sg = &data->sg[host->pio.index]; in au1xmmc_send_pio()
408 sg_ptr = sg_virt(sg) + host->pio.offset; in au1xmmc_send_pio()
411 sg_len = data->sg[host->pio.index].length - host->pio.offset; in au1xmmc_send_pio()
454 struct scatterlist *sg; in au1xmmc_receive_pio() local
464 sg = &data->sg[host->pio.index]; in au1xmmc_receive_pio()
465 sg_ptr = sg_virt(sg) + host->pio.offset; in au1xmmc_receive_pio()
468 sg_len = sg_dma_len(&data->sg[host->pio.index]) - host->pio.offset; in au1xmmc_receive_pio()
630 host->dma.len = dma_map_sg(mmc_dev(host->mmc), data->sg, in au1xmmc_prepare_data()
[all …]
android-goldfish.c 225 uint8_t *dest = (uint8_t *)sg_virt(data->sg); in goldfish_mmc_xfer_done()
226 memcpy(dest, host->virt_base, data->sg->length); in goldfish_mmc_xfer_done()
228 host->data->bytes_xfered += data->sg->length; in goldfish_mmc_xfer_done()
229 dma_unmap_sg(mmc_dev(host->mmc), data->sg, host->sg_len, in goldfish_mmc_xfer_done()
398 host->sg_len = dma_map_sg(mmc_dev(host->mmc), data->sg, in goldfish_mmc_prepare_data()
408 const uint8_t *src = (uint8_t *)sg_virt(data->sg); in goldfish_mmc_prepare_data()
409 memcpy(host->virt_base, src, data->sg->length); in goldfish_mmc_prepare_data()
/linux-4.4.14/arch/arc/include/asm/
dma-mapping.h 105 dma_map_sg(struct device *dev, struct scatterlist *sg, in dma_map_sg() argument
111 for_each_sg(sg, s, nents, i) in dma_map_sg()
119 dma_unmap_sg(struct device *dev, struct scatterlist *sg, in dma_unmap_sg() argument
125 for_each_sg(sg, s, nents, i) in dma_unmap_sg()
164 struct scatterlist *sg; in dma_sync_sg_for_cpu() local
166 for_each_sg(sglist, sg, nelems, i) in dma_sync_sg_for_cpu()
167 _dma_cache_sync((unsigned int)sg_virt(sg), sg->length, dir); in dma_sync_sg_for_cpu()
175 struct scatterlist *sg; in dma_sync_sg_for_device() local
177 for_each_sg(sglist, sg, nelems, i) in dma_sync_sg_for_device()
178 _dma_cache_sync((unsigned int)sg_virt(sg), sg->length, dir); in dma_sync_sg_for_device()
/linux-4.4.14/drivers/gpu/drm/nouveau/
nouveau_sgdma.c 32 if (ttm->sg) { in nv04_sgdma_bind()
33 node->sg = ttm->sg; in nv04_sgdma_bind()
36 node->sg = NULL; in nv04_sgdma_bind()
67 if (ttm->sg) { in nv50_sgdma_bind()
68 node->sg = ttm->sg; in nv50_sgdma_bind()
71 node->sg = NULL; in nv50_sgdma_bind()
/linux-4.4.14/arch/m68k/kernel/
dma.c 127 struct scatterlist *sg; in dma_sync_sg_for_device() local
129 for_each_sg(sglist, sg, nents, i) { in dma_sync_sg_for_device()
130 dma_sync_single_for_device(dev, sg->dma_address, sg->length, in dma_sync_sg_for_device()
161 struct scatterlist *sg; in dma_map_sg() local
163 for_each_sg(sglist, sg, nents, i) { in dma_map_sg()
164 sg->dma_address = sg_phys(sg); in dma_map_sg()
165 dma_sync_single_for_device(dev, sg->dma_address, sg->length, in dma_map_sg()
/linux-4.4.14/arch/powerpc/kernel/
dma.c 200 struct scatterlist *sg; in dma_direct_map_sg() local
203 for_each_sg(sgl, sg, nents, i) { in dma_direct_map_sg()
204 sg->dma_address = sg_phys(sg) + get_dma_offset(dev); in dma_direct_map_sg()
205 sg->dma_length = sg->length; in dma_direct_map_sg()
206 __dma_sync_page(sg_page(sg), sg->offset, sg->length, direction); in dma_direct_map_sg()
212 static void dma_direct_unmap_sg(struct device *dev, struct scatterlist *sg, in dma_direct_unmap_sg() argument
255 struct scatterlist *sg; in dma_direct_sync_sg() local
258 for_each_sg(sgl, sg, nents, i) in dma_direct_sync_sg()
259 __dma_sync_page(sg_page(sg), sg->offset, sg->length, direction); in dma_direct_sync_sg()
/linux-4.4.14/drivers/staging/rdma/ipath/
ipath_dma.c 104 struct scatterlist *sg; in ipath_map_sg() local
111 for_each_sg(sgl, sg, nents, i) { in ipath_map_sg()
112 addr = (u64) page_address(sg_page(sg)); in ipath_map_sg()
118 sg->dma_address = addr + sg->offset; in ipath_map_sg()
120 sg->dma_length = sg->length; in ipath_map_sg()
127 struct scatterlist *sg, int nents, in ipath_unmap_sg() argument
/linux-4.4.14/drivers/infiniband/hw/qib/
Dqib_dma.c97 struct scatterlist *sg; in qib_map_sg() local
104 for_each_sg(sgl, sg, nents, i) { in qib_map_sg()
105 addr = (u64) page_address(sg_page(sg)); in qib_map_sg()
111 sg->dma_address = addr + sg->offset; in qib_map_sg()
113 sg->dma_length = sg->length; in qib_map_sg()
120 struct scatterlist *sg, int nents, in qib_unmap_sg() argument
/linux-4.4.14/drivers/staging/rdma/hfi1/
Ddma.c114 struct scatterlist *sg; in hfi1_map_sg() local
122 for_each_sg(sgl, sg, nents, i) { in hfi1_map_sg()
123 addr = (u64) page_address(sg_page(sg)); in hfi1_map_sg()
128 sg->dma_address = addr + sg->offset; in hfi1_map_sg()
130 sg->dma_length = sg->length; in hfi1_map_sg()
137 struct scatterlist *sg, int nents, in hfi1_unmap_sg() argument
/linux-4.4.14/arch/arm/mach-rpc/
Ddma.c56 static void iomd_get_next_sg(struct scatterlist *sg, struct iomd_dma *idma) in iomd_get_next_sg() argument
60 if (idma->dma.sg) { in iomd_get_next_sg()
61 sg->dma_address = idma->dma_addr; in iomd_get_next_sg()
62 offset = sg->dma_address & ~PAGE_MASK; in iomd_get_next_sg()
72 sg->length = end - TRANSFER_SIZE; in iomd_get_next_sg()
79 idma->dma.sg = sg_next(idma->dma.sg); in iomd_get_next_sg()
80 idma->dma_addr = idma->dma.sg->dma_address; in iomd_get_next_sg()
81 idma->dma_len = idma->dma.sg->length; in iomd_get_next_sg()
84 idma->dma.sg = NULL; in iomd_get_next_sg()
90 sg->dma_address = 0; in iomd_get_next_sg()
[all …]
/linux-4.4.14/drivers/scsi/arm/
Dscsi.h27 static inline int copy_SCp_to_sg(struct scatterlist *sg, struct scsi_pointer *SCp, int max) in copy_SCp_to_sg() argument
36 sg_set_buf(sg, SCp->ptr, SCp->this_residual); in copy_SCp_to_sg()
43 *(++sg) = *src_sg; in copy_SCp_to_sg()
44 sg_mark_end(sg); in copy_SCp_to_sg()
100 struct scatterlist *sg; in init_SCp() local
103 scsi_for_each_sg(SCpnt, sg, sg_count, i) in init_SCp()
104 len += sg->length; in init_SCp()
/linux-4.4.14/net/9p/
Dtrans_virtio.c92 struct scatterlist sg[VIRTQUEUE_NUM]; member
185 static int pack_sg_list(struct scatterlist *sg, int start, in pack_sg_list() argument
197 sg_unmark_end(&sg[index]); in pack_sg_list()
198 sg_set_buf(&sg[index++], data, s); in pack_sg_list()
203 sg_mark_end(&sg[index - 1]); in pack_sg_list()
224 pack_sg_list_p(struct scatterlist *sg, int start, int limit, in pack_sg_list_p() argument
241 sg_unmark_end(&sg[index]); in pack_sg_list_p()
242 sg_set_page(&sg[index++], pdata[i++], s, data_off); in pack_sg_list_p()
249 sg_mark_end(&sg[index - 1]); in pack_sg_list_p()
277 out = pack_sg_list(chan->sg, 0, in p9_virtio_request()
[all …]
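
pack_sg_list() above is the canonical way a preallocated sg array is reused per request: clear any stale end marker, fill entries, re-mark the tail. A condensed restatement of the same logic (page chunking simplified):

    #include <linux/kernel.h>
    #include <linux/scatterlist.h>

    /* Fill sg[] from a linear buffer in page-sized chunks and
     * terminate the list.  Returns the number of entries used. */
    static int pack_buf(struct scatterlist *sg, int limit,
                        char *data, int count)
    {
            int index = 0;

            while (count) {
                    int s = min_t(int, count, PAGE_SIZE);

                    BUG_ON(index >= limit);
                    /* clear an end marker left by an earlier use */
                    sg_unmark_end(&sg[index]);
                    sg_set_buf(&sg[index++], data, s);
                    data += s;
                    count -= s;
            }
            if (index)
                    sg_mark_end(&sg[index - 1]);
            return index;
    }
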
/linux-4.4.14/block/
Dblk-merge.c338 struct scatterlist **sg, int *nsegs, int *cluster) in __blk_segment_map_sg() argument
343 if (*sg && *cluster) { in __blk_segment_map_sg()
344 if ((*sg)->length + nbytes > queue_max_segment_size(q)) in __blk_segment_map_sg()
352 (*sg)->length += nbytes; in __blk_segment_map_sg()
355 if (!*sg) in __blk_segment_map_sg()
356 *sg = sglist; in __blk_segment_map_sg()
368 sg_unmark_end(*sg); in __blk_segment_map_sg()
369 *sg = sg_next(*sg); in __blk_segment_map_sg()
372 sg_set_page(*sg, bvec->bv_page, nbytes, bvec->bv_offset); in __blk_segment_map_sg()
380 struct scatterlist **sg) in __blk_bios_map_sg() argument
[all …]
Dblk-integrity.c90 struct scatterlist *sg = NULL; in blk_rq_map_integrity_sg() local
104 if (sg->length + iv.bv_len > queue_max_segment_size(q)) in blk_rq_map_integrity_sg()
107 sg->length += iv.bv_len; in blk_rq_map_integrity_sg()
110 if (!sg) in blk_rq_map_integrity_sg()
111 sg = sglist; in blk_rq_map_integrity_sg()
113 sg_unmark_end(sg); in blk_rq_map_integrity_sg()
114 sg = sg_next(sg); in blk_rq_map_integrity_sg()
117 sg_set_page(sg, iv.bv_page, iv.bv_len, iv.bv_offset); in blk_rq_map_integrity_sg()
125 if (sg) in blk_rq_map_integrity_sg()
126 sg_mark_end(sg); in blk_rq_map_integrity_sg()
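
__blk_segment_map_sg() and blk_rq_map_integrity_sg() above are the producer side behind blk_rq_map_sg(). A block driver consumes it roughly as follows; struct mydev and its preallocated table, sized to queue_max_segments(), are invented for the sketch:

    #include <linux/blkdev.h>
    #include <linux/dma-mapping.h>

    /* Hypothetical per-device state. */
    struct mydev {
            struct device *dev;
            struct scatterlist *sg_table;
            unsigned int max_segs;
    };

    /* Turn a request into an sg list, then into bus addresses. */
    static int mydev_prep_request(struct mydev *md, struct request *req)
    {
            int nents, mapped;

            sg_init_table(md->sg_table, md->max_segs);
            nents = blk_rq_map_sg(req->q, req, md->sg_table);

            mapped = dma_map_sg(md->dev, md->sg_table, nents,
                                rq_data_dir(req) == WRITE ? DMA_TO_DEVICE
                                                          : DMA_FROM_DEVICE);
            return mapped ? mapped : -ENOMEM;
    }
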
/linux-4.4.14/drivers/target/
Dtarget_core_iblock.c419 struct scatterlist *sg; in iblock_execute_write_same() local
432 sg = &cmd->t_data_sg[0]; in iblock_execute_write_same()
435 sg->length != cmd->se_dev->dev_attrib.block_size) { in iblock_execute_write_same()
437 " block_size: %u\n", cmd->t_data_nents, sg->length, in iblock_execute_write_same()
457 while (bio_add_page(bio, sg_page(sg), sg->length, sg->offset) in iblock_execute_write_same()
458 != sg->length) { in iblock_execute_write_same()
469 block_lba += sg->length >> IBLOCK_LBA_SHIFT; in iblock_execute_write_same()
596 struct scatterlist *sg; in iblock_alloc_bip() local
618 for_each_sg(cmd->t_prot_sg, sg, cmd->t_prot_nents, i) { in iblock_alloc_bip()
620 rc = bio_integrity_add_page(bio, sg_page(sg), sg->length, in iblock_alloc_bip()
[all …]
Dtarget_core_rd.c80 struct scatterlist *sg; in rd_release_sgl_table() local
84 sg = sg_table[i].sg_table; in rd_release_sgl_table()
88 pg = sg_page(&sg[j]); in rd_release_sgl_table()
94 kfree(sg); in rd_release_sgl_table()
132 struct scatterlist *sg; in rd_allocate_sgl_table() local
147 sg = kcalloc(sg_per_table + chain_entry, sizeof(*sg), in rd_allocate_sgl_table()
149 if (!sg) { in rd_allocate_sgl_table()
155 sg_init_table(sg, sg_per_table + chain_entry); in rd_allocate_sgl_table()
159 max_sg_per_table + 1, sg); in rd_allocate_sgl_table()
162 sg_table[i].sg_table = sg; in rd_allocate_sgl_table()
[all …]
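
rd_allocate_sgl_table() above reserves one extra slot per table (chain_entry) so the tail of each array can link to the next. In isolation, on architectures with sg chaining support (ARCH_HAS_SG_CHAIN), the chaining step is just this sketch:

    #include <linux/scatterlist.h>

    /* Link two sg arrays into one logical list.  After sg_chain(),
     * only nents - 1 slots of 'first' carry data; the last slot is
     * the link.  Walk the result with sg_next()/for_each_sg(), never
     * by direct indexing. */
    static void chain_tables(struct scatterlist *first, unsigned int nents,
                             struct scatterlist *second,
                             unsigned int second_nents)
    {
            sg_init_table(first, nents);
            sg_init_table(second, second_nents);
            sg_chain(first, nents, second);
    }
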
Dtarget_core_sbc.c378 struct scatterlist *sg; in xdreadwrite_callback() local
413 for_each_sg(cmd->t_bidi_data_sg, sg, cmd->t_bidi_data_nents, count) { in xdreadwrite_callback()
414 addr = kmap_atomic(sg_page(sg)); in xdreadwrite_callback()
420 for (i = 0; i < sg->length; i++) in xdreadwrite_callback()
421 *(addr + sg->offset + i) ^= *(buf + offset + i); in xdreadwrite_callback()
423 offset += sg->length; in xdreadwrite_callback()
471 struct scatterlist *write_sg = NULL, *sg; in compare_and_write_callback() local
530 for_each_sg(cmd->t_bidi_data_sg, sg, cmd->t_bidi_data_nents, i) { in compare_and_write_callback()
531 addr = (unsigned char *)kmap_atomic(sg_page(sg)); in compare_and_write_callback()
537 len = min(sg->length, compare_len); in compare_and_write_callback()
[all …]
Dtarget_core_user.c242 struct scatterlist *sg; in alloc_and_scatter_data_area() local
244 for_each_sg(data_sg, sg, data_nents, i) { in alloc_and_scatter_data_area()
245 copy_bytes = min_t(size_t, sg->length, in alloc_and_scatter_data_area()
247 from = kmap_atomic(sg_page(sg)) + sg->offset; in alloc_and_scatter_data_area()
265 if (sg->length != copy_bytes) { in alloc_and_scatter_data_area()
268 copy_bytes = sg->length - copy_bytes; in alloc_and_scatter_data_area()
288 kunmap_atomic(from - sg->offset); in alloc_and_scatter_data_area()
298 struct scatterlist *sg; in gather_and_free_data_area() local
301 for_each_sg(data_sg, sg, data_nents, i) { in gather_and_free_data_area()
302 copy_bytes = min_t(size_t, sg->length, in gather_and_free_data_area()
[all …]
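
alloc_and_scatter_data_area() above open-codes an sg scatter copy through kmap_atomic(). Stripped to the core idiom, and assuming no entry crosses a page boundary (as holds for the page-sized entries used there), a sketch:

    #include <linux/highmem.h>
    #include <linux/kernel.h>
    #include <linux/scatterlist.h>

    /* Spread a linear buffer across the pages of an sg list. */
    static void scatter_buf(struct scatterlist *sgl, int nents,
                            const char *buf, size_t len)
    {
            struct scatterlist *sg;
            int i;

            for_each_sg(sgl, sg, nents, i) {
                    size_t n = min_t(size_t, sg->length, len);
                    void *to = kmap_atomic(sg_page(sg));

                    memcpy(to + sg->offset, buf, n);
                    kunmap_atomic(to);

                    buf += n;
                    len -= n;
                    if (!len)
                            break;
            }
    }
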
/linux-4.4.14/drivers/crypto/
Dbfin_crc.c83 struct scatterlist *sg; /* sg list head for this update*/ member
105 struct scatterlist *sg = NULL; in sg_get() local
108 for_each_sg(sg_list, sg, nents, i) in sg_get()
112 return sg; in sg_get()
167 struct scatterlist *sg; in bfin_crypto_crc_config_dma() local
176 dma_map_sg(crc->dev, ctx->sg, ctx->sg_nents, DMA_TO_DEVICE); in bfin_crypto_crc_config_dma()
178 for_each_sg(ctx->sg, sg, ctx->sg_nents, j) { in bfin_crypto_crc_config_dma()
179 dma_addr = sg_dma_address(sg); in bfin_crypto_crc_config_dma()
181 if (sg_is_last(sg)) in bfin_crypto_crc_config_dma()
182 dma_count = sg_dma_len(sg) - ctx->bufnext_len; in bfin_crypto_crc_config_dma()
[all …]
Datmel-sha.c92 struct scatterlist *sg; member
163 count = min(ctx->sg->length - ctx->offset, ctx->total); in atmel_sha_append_sg()
173 if ((ctx->sg->length == 0) && !sg_is_last(ctx->sg)) { in atmel_sha_append_sg()
174 ctx->sg = sg_next(ctx->sg); in atmel_sha_append_sg()
181 scatterwalk_map_and_copy(ctx->buffer + ctx->bufcnt, ctx->sg, in atmel_sha_append_sg()
188 if (ctx->offset == ctx->sg->length) { in atmel_sha_append_sg()
189 ctx->sg = sg_next(ctx->sg); in atmel_sha_append_sg()
190 if (ctx->sg) in atmel_sha_append_sg()
430 struct scatterlist sg[2]; in atmel_sha_xmit_dma() local
441 sg_init_table(sg, 2); in atmel_sha_xmit_dma()
[all …]
Ds5p-sss.c235 static void s5p_set_dma_indata(struct s5p_aes_dev *dev, struct scatterlist *sg) in s5p_set_dma_indata() argument
237 SSS_WRITE(dev, FCBRDMAS, sg_dma_address(sg)); in s5p_set_dma_indata()
238 SSS_WRITE(dev, FCBRDMAL, sg_dma_len(sg)); in s5p_set_dma_indata()
241 static void s5p_set_dma_outdata(struct s5p_aes_dev *dev, struct scatterlist *sg) in s5p_set_dma_outdata() argument
243 SSS_WRITE(dev, FCBTDMAS, sg_dma_address(sg)); in s5p_set_dma_outdata()
244 SSS_WRITE(dev, FCBTDMAL, sg_dma_len(sg)); in s5p_set_dma_outdata()
264 static int s5p_set_outdata(struct s5p_aes_dev *dev, struct scatterlist *sg) in s5p_set_outdata() argument
268 if (!IS_ALIGNED(sg_dma_len(sg), AES_BLOCK_SIZE)) { in s5p_set_outdata()
272 if (!sg_dma_len(sg)) { in s5p_set_outdata()
277 err = dma_map_sg(dev->dev, sg, 1, DMA_FROM_DEVICE); in s5p_set_outdata()
[all …]
Dimg-hash.c96 struct scatterlist *sg; member
217 if (ctx->sg) in img_hash_dma_callback()
221 static int img_hash_xmit_dma(struct img_hash_dev *hdev, struct scatterlist *sg) in img_hash_xmit_dma() argument
226 ctx->dma_ct = dma_map_sg(hdev->dev, sg, 1, DMA_MEM_TO_DEV); in img_hash_xmit_dma()
234 sg, in img_hash_xmit_dma()
241 dma_unmap_sg(hdev->dev, sg, 1, DMA_MEM_TO_DEV); in img_hash_xmit_dma()
256 ctx->bufcnt = sg_copy_to_buffer(hdev->req->src, sg_nents(ctx->sg), in img_hash_write_via_cpu()
364 if (!ctx->sg) in img_hash_dma_task()
367 addr = sg_virt(ctx->sg); in img_hash_dma_task()
368 nbytes = ctx->sg->length - ctx->offset; in img_hash_dma_task()
[all …]
Dsahara.c453 struct scatterlist *sg; in sahara_hw_descriptor_create() local
502 sg = dev->in_sg; in sahara_hw_descriptor_create()
504 dev->hw_link[i]->len = sg->length; in sahara_hw_descriptor_create()
505 dev->hw_link[i]->p = sg->dma_address; in sahara_hw_descriptor_create()
510 sg = sg_next(sg); in sahara_hw_descriptor_create()
516 sg = dev->out_sg; in sahara_hw_descriptor_create()
518 dev->hw_link[j]->len = sg->length; in sahara_hw_descriptor_create()
519 dev->hw_link[j]->p = sg->dma_address; in sahara_hw_descriptor_create()
524 sg = sg_next(sg); in sahara_hw_descriptor_create()
789 struct scatterlist *sg; in sahara_sha_hw_links_create() local
[all …]
Domap-sham.c152 struct scatterlist *sg; member
591 sg_assign_page(&ctx->sgl, sg_page(ctx->sg)); in omap_sham_xmit_dma()
592 ctx->sgl.offset = ctx->sg->offset; in omap_sham_xmit_dma()
594 sg_dma_address(&ctx->sgl) = sg_dma_address(ctx->sg); in omap_sham_xmit_dma()
647 while (ctx->sg) { in omap_sham_append_sg()
648 vaddr = kmap_atomic(sg_page(ctx->sg)); in omap_sham_append_sg()
649 vaddr += ctx->sg->offset; in omap_sham_append_sg()
653 ctx->sg->length - ctx->offset); in omap_sham_append_sg()
661 if (ctx->offset == ctx->sg->length) { in omap_sham_append_sg()
662 ctx->sg = sg_next(ctx->sg); in omap_sham_append_sg()
[all …]
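
Each of these hash drivers drains a scatterlist into a staging buffer by hand because it must respect device block sizes and DMA alignment. Where no such constraint applies, sg_copy_to_buffer() performs the whole kmap-and-copy walk in one call (sketch):

    #include <linux/scatterlist.h>

    /* Stage up to buflen bytes of an sg list into a bounce buffer;
     * the core handles page crossings and mapping internally.
     * sg_copy_from_buffer() is the mirror direction. */
    static size_t stage_sg(struct scatterlist *sgl, void *bounce,
                           size_t buflen)
    {
            return sg_copy_to_buffer(sgl, sg_nents(sgl), bounce, buflen);
    }
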
/linux-4.4.14/arch/parisc/kernel/
Dpci-dma.c481 struct scatterlist *sg; in pa11_dma_map_sg() local
485 for_each_sg(sglist, sg, nents, i) { in pa11_dma_map_sg()
486 unsigned long vaddr = (unsigned long)sg_virt(sg); in pa11_dma_map_sg()
488 sg_dma_address(sg) = (dma_addr_t) virt_to_phys(vaddr); in pa11_dma_map_sg()
489 sg_dma_len(sg) = sg->length; in pa11_dma_map_sg()
490 flush_kernel_dcache_range(vaddr, sg->length); in pa11_dma_map_sg()
498 struct scatterlist *sg; in pa11_dma_unmap_sg() local
507 for_each_sg(sglist, sg, nents, i) in pa11_dma_unmap_sg()
508 flush_kernel_vmap_range(sg_virt(sg), sg->length); in pa11_dma_unmap_sg()
529 struct scatterlist *sg; in pa11_dma_sync_sg_for_cpu() local
[all …]
/linux-4.4.14/arch/mn10300/include/asm/
Ddma-mapping.h54 struct scatterlist *sg; in dma_map_sg() local
60 for_each_sg(sglist, sg, nents, i) { in dma_map_sg()
61 BUG_ON(!sg_page(sg)); in dma_map_sg()
63 sg->dma_address = sg_phys(sg); in dma_map_sg()
71 void dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries, in dma_unmap_sg() argument
123 void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, in dma_sync_sg_for_cpu() argument
129 void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, in dma_sync_sg_for_device() argument
/linux-4.4.14/include/asm-generic/
Dpci-dma-compat.h59 pci_map_sg(struct pci_dev *hwdev, struct scatterlist *sg, in pci_map_sg() argument
62 …return dma_map_sg(hwdev == NULL ? NULL : &hwdev->dev, sg, nents, (enum dma_data_direction)directio… in pci_map_sg()
66 pci_unmap_sg(struct pci_dev *hwdev, struct scatterlist *sg, in pci_unmap_sg() argument
69 dma_unmap_sg(hwdev == NULL ? NULL : &hwdev->dev, sg, nents, (enum dma_data_direction)direction); in pci_unmap_sg()
87 pci_dma_sync_sg_for_cpu(struct pci_dev *hwdev, struct scatterlist *sg, in pci_dma_sync_sg_for_cpu() argument
90 …dma_sync_sg_for_cpu(hwdev == NULL ? NULL : &hwdev->dev, sg, nelems, (enum dma_data_direction)direc… in pci_dma_sync_sg_for_cpu()
94 pci_dma_sync_sg_for_device(struct pci_dev *hwdev, struct scatterlist *sg, in pci_dma_sync_sg_for_device() argument
97 …dma_sync_sg_for_device(hwdev == NULL ? NULL : &hwdev->dev, sg, nelems, (enum dma_data_direction)di… in pci_dma_sync_sg_for_device()
Ddma-mapping-common.h47 static inline int dma_map_sg_attrs(struct device *dev, struct scatterlist *sg, in dma_map_sg_attrs() argument
55 for_each_sg(sg, s, nents, i) in dma_map_sg_attrs()
58 ents = ops->map_sg(dev, sg, nents, dir, attrs); in dma_map_sg_attrs()
60 debug_dma_map_sg(dev, sg, nents, ents, dir); in dma_map_sg_attrs()
65 static inline void dma_unmap_sg_attrs(struct device *dev, struct scatterlist *sg, in dma_unmap_sg_attrs() argument
72 debug_dma_unmap_sg(dev, sg, nents, dir); in dma_unmap_sg_attrs()
74 ops->unmap_sg(dev, sg, nents, dir, attrs); in dma_unmap_sg_attrs()
156 dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, in dma_sync_sg_for_cpu() argument
163 ops->sync_sg_for_cpu(dev, sg, nelems, dir); in dma_sync_sg_for_cpu()
164 debug_dma_sync_sg_for_cpu(dev, sg, nelems, dir); in dma_sync_sg_for_cpu()
[all …]
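
dma_map_sg_attrs() and dma_sync_sg_for_cpu() above are the generic dispatchers behind the macros drivers call. One point the dispatch obscures is worth a sketch: a long-lived streaming mapping is handed back and forth with the sync pair, not remapped per access:

    #include <linux/dma-mapping.h>

    /* Let the CPU inspect a mapped RX area, then return ownership to
     * the device -- no unmap/remap cycle needed. */
    static void inspect_then_rearm(struct device *dev,
                                   struct scatterlist *sgl, int nents)
    {
            dma_sync_sg_for_cpu(dev, sgl, nents, DMA_FROM_DEVICE);
            /* ... CPU reads the data here ... */
            dma_sync_sg_for_device(dev, sgl, nents, DMA_FROM_DEVICE);
    }
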
Ddma-mapping-broken.h47 dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
51 dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries,
72 dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nelems,
/linux-4.4.14/drivers/crypto/ccp/
Dccp-crypto-main.c303 struct scatterlist *sg, *sg_last = NULL; in ccp_crypto_sg_table_add() local
305 for (sg = table->sgl; sg; sg = sg_next(sg)) in ccp_crypto_sg_table_add()
306 if (!sg_page(sg)) in ccp_crypto_sg_table_add()
308 if (WARN_ON(!sg)) in ccp_crypto_sg_table_add()
311 for (; sg && sg_add; sg = sg_next(sg), sg_add = sg_next(sg_add)) { in ccp_crypto_sg_table_add()
312 sg_set_page(sg, sg_page(sg_add), sg_add->length, in ccp_crypto_sg_table_add()
314 sg_last = sg; in ccp_crypto_sg_table_add()
Dccp-crypto-aes-cmac.c64 struct scatterlist *sg, *cmac_key_sg = NULL; in ccp_do_cmac_update() local
117 sg = NULL; in ccp_do_cmac_update()
120 sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->buf_sg); in ccp_do_cmac_update()
121 if (!sg) { in ccp_do_cmac_update()
128 sg = ccp_crypto_sg_table_add(&rctx->data_sg, req->src); in ccp_do_cmac_update()
129 if (!sg) { in ccp_do_cmac_update()
143 sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->pad_sg); in ccp_do_cmac_update()
144 if (!sg) { in ccp_do_cmac_update()
149 if (sg) { in ccp_do_cmac_update()
150 sg_mark_end(sg); in ccp_do_cmac_update()
[all …]
Dccp-ops.c54 struct scatterlist *sg; member
489 struct scatterlist *sg, u64 len, in ccp_init_sg_workarea() argument
494 wa->sg = sg; in ccp_init_sg_workarea()
495 if (!sg) in ccp_init_sg_workarea()
498 wa->nents = sg_nents_for_len(sg, len); in ccp_init_sg_workarea()
511 wa->dma_sg = sg; in ccp_init_sg_workarea()
514 wa->dma_count = dma_map_sg(dev, sg, wa->nents, dma_dir); in ccp_init_sg_workarea()
525 if (!wa->sg) in ccp_update_sg_workarea()
530 if (wa->sg_used == wa->sg->length) { in ccp_update_sg_workarea()
531 wa->sg = sg_next(wa->sg); in ccp_update_sg_workarea()
[all …]
Dccp-crypto-sha.c63 struct scatterlist *sg; in ccp_do_sha_update() local
96 sg = NULL; in ccp_do_sha_update()
109 sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->buf_sg); in ccp_do_sha_update()
110 if (!sg) { in ccp_do_sha_update()
114 sg = ccp_crypto_sg_table_add(&rctx->data_sg, req->src); in ccp_do_sha_update()
115 if (!sg) { in ccp_do_sha_update()
119 sg_mark_end(sg); in ccp_do_sha_update()
121 sg = rctx->data_sg.sgl; in ccp_do_sha_update()
125 sg = &rctx->buf_sg; in ccp_do_sha_update()
127 sg = req->src; in ccp_do_sha_update()
[all …]
/linux-4.4.14/drivers/scsi/
Dlibiscsi_tcp.c98 struct scatterlist *sg, unsigned int offset) in iscsi_tcp_segment_init_sg() argument
100 segment->sg = sg; in iscsi_tcp_segment_init_sg()
102 segment->size = min(sg->length - offset, in iscsi_tcp_segment_init_sg()
118 struct scatterlist *sg; in iscsi_tcp_segment_map() local
120 if (segment->data != NULL || !segment->sg) in iscsi_tcp_segment_map()
123 sg = segment->sg; in iscsi_tcp_segment_map()
125 BUG_ON(sg->length == 0); in iscsi_tcp_segment_map()
133 if (page_count(sg_page(sg)) >= 1 && !recv) in iscsi_tcp_segment_map()
138 segment->sg_mapped = kmap_atomic(sg_page(sg)); in iscsi_tcp_segment_map()
142 segment->sg_mapped = kmap(sg_page(sg)); in iscsi_tcp_segment_map()
[all …]
/linux-4.4.14/arch/parisc/include/asm/
Ddma-mapping.h16 …int (*map_sg)(struct device *dev, struct scatterlist *sg, int nents, enum dma_data_direction dire…
17 …void (*unmap_sg)(struct device *dev, struct scatterlist *sg, int nhwents, enum dma_data_direction …
20 …void (*dma_sync_sg_for_cpu)(struct device *dev, struct scatterlist *sg, int nelems, enum dma_data_…
21 …void (*dma_sync_sg_for_device)(struct device *dev, struct scatterlist *sg, int nelems, enum dma_da…
95 dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg() argument
98 return hppa_dma_ops->map_sg(dev, sg, nents, direction); in dma_map_sg()
102 dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries, in dma_unmap_sg() argument
105 hppa_dma_ops->unmap_sg(dev, sg, nhwentries, direction); in dma_unmap_sg()
158 dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nelems, in dma_sync_sg_for_cpu() argument
162 hppa_dma_ops->dma_sync_sg_for_cpu(dev, sg, nelems, direction); in dma_sync_sg_for_cpu()
[all …]
/linux-4.4.14/drivers/net/wireless/orinoco/
Dmic.c51 struct scatterlist sg[2]; in orinoco_mic() local
68 sg_init_table(sg, 2); in orinoco_mic()
69 sg_set_buf(&sg[0], hdr, sizeof(hdr)); in orinoco_mic()
70 sg_set_buf(&sg[1], data, data_len); in orinoco_mic()
77 return crypto_hash_digest(&desc, sg, data_len + sizeof(hdr), in orinoco_mic()
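
orinoco_mic() is the classic two-fragment digest: header and payload stay where they are, and the sg list stitches them together for one crypto_hash_digest() call. Its skeleton, assuming a tfm already allocated with crypto_alloc_hash():

    #include <linux/crypto.h>
    #include <linux/scatterlist.h>

    /* Digest two discontiguous buffers without copying them together. */
    static int digest_two_bufs(struct crypto_hash *tfm,
                               void *hdr, size_t hlen,
                               void *data, size_t dlen, u8 *out)
    {
            struct hash_desc desc = { .tfm = tfm };
            struct scatterlist sg[2];

            sg_init_table(sg, 2);
            sg_set_buf(&sg[0], hdr, hlen);
            sg_set_buf(&sg[1], data, dlen);

            return crypto_hash_digest(&desc, sg, hlen + dlen, out);
    }
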
/linux-4.4.14/drivers/mmc/core/
Dsd_ops.c254 struct scatterlist sg; in mmc_app_send_scr() local
284 data.sg = &sg; in mmc_app_send_scr()
287 sg_init_one(&sg, data_buf, 8); in mmc_app_send_scr()
313 struct scatterlist sg; in mmc_sd_switch() local
335 data.sg = &sg; in mmc_sd_switch()
338 sg_init_one(&sg, resp, 64); in mmc_sd_switch()
358 struct scatterlist sg; in mmc_app_sd_status() local
380 data.sg = &sg; in mmc_app_sd_status()
383 sg_init_one(&sg, ssr, 64); in mmc_app_sd_status()
Dsdio_ops.c127 struct scatterlist sg, *sg_ptr; in mmc_io_rw_extended() local
165 data.sg = sgtable.sgl; in mmc_io_rw_extended()
168 for_each_sg(data.sg, sg_ptr, data.sg_len, i) { in mmc_io_rw_extended()
175 data.sg = &sg; in mmc_io_rw_extended()
178 sg_init_one(&sg, buf, left_size); in mmc_io_rw_extended()
Dmmc_ops.c297 struct scatterlist sg; in mmc_send_cxd_data() local
315 data.sg = &sg; in mmc_send_cxd_data()
318 sg_init_one(&sg, buf, len); in mmc_send_cxd_data()
596 struct scatterlist sg; in mmc_send_tuning() local
632 data.sg = &sg; in mmc_send_tuning()
634 sg_init_one(&sg, data_buf, size); in mmc_send_tuning()
667 struct scatterlist sg; in mmc_send_bus_test() local
714 data.sg = &sg; in mmc_send_bus_test()
717 sg_init_one(&sg, data_buf, len); in mmc_send_bus_test()
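
All of the helpers here hang a single kernel buffer off an mmc_data via sg_init_one(), which bundles sg_init_table(), sg_set_buf() and the end marker into one call. The shared three lines, factored out as a sketch:

    #include <linux/mmc/core.h>
    #include <linux/scatterlist.h>

    /* Point an MMC data phase at one linear buffer. */
    static void mmc_attach_buf(struct mmc_data *data, struct scatterlist *sg,
                               void *buf, unsigned int len)
    {
            sg_init_one(sg, buf, len);      /* one-entry, terminated list */
            data->sg = sg;
            data->sg_len = 1;
    }
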
/linux-4.4.14/drivers/target/iscsi/
Discsi_target_auth.c190 struct scatterlist sg; in chap_server_compute_md5() local
263 sg_init_one(&sg, &chap->id, 1); in chap_server_compute_md5()
264 ret = crypto_hash_update(&desc, &sg, 1); in chap_server_compute_md5()
271 sg_init_one(&sg, &auth->password, strlen(auth->password)); in chap_server_compute_md5()
272 ret = crypto_hash_update(&desc, &sg, strlen(auth->password)); in chap_server_compute_md5()
279 sg_init_one(&sg, chap->challenge, CHAP_CHALLENGE_LENGTH); in chap_server_compute_md5()
280 ret = crypto_hash_update(&desc, &sg, CHAP_CHALLENGE_LENGTH); in chap_server_compute_md5()
392 sg_init_one(&sg, &id_as_uchar, 1); in chap_server_compute_md5()
393 ret = crypto_hash_update(&desc, &sg, 1); in chap_server_compute_md5()
400 sg_init_one(&sg, auth->password_mutual, in chap_server_compute_md5()
[all …]
/linux-4.4.14/drivers/spi/
Dspi-topcliff-pch.c931 struct scatterlist *sg; in pch_spi_handle_dma() local
1018 sg = dma->sg_rx_p; in pch_spi_handle_dma()
1019 for (i = 0; i < num; i++, sg++) { in pch_spi_handle_dma()
1021 sg->offset = size * i; in pch_spi_handle_dma()
1022 sg->offset = sg->offset * (*bpw / 8); in pch_spi_handle_dma()
1023 sg_set_page(sg, virt_to_page(dma->rx_buf_virt), rem, in pch_spi_handle_dma()
1024 sg->offset); in pch_spi_handle_dma()
1025 sg_dma_len(sg) = rem; in pch_spi_handle_dma()
1027 sg->offset = size * (i - 1) + rem; in pch_spi_handle_dma()
1028 sg->offset = sg->offset * (*bpw / 8); in pch_spi_handle_dma()
[all …]
/linux-4.4.14/drivers/dma/ipu/
Dipu_idmac.c777 struct idmac_tx_desc *desc, struct scatterlist *sg, int buf_idx) in ipu_submit_buffer() argument
791 ipu_update_channel_buffer(ichan, buf_idx, sg_dma_address(sg)); in ipu_submit_buffer()
795 sg, chan_id, buf_idx); in ipu_submit_buffer()
804 struct scatterlist *sg; in ipu_submit_channel_buffers() local
807 for (i = 0, sg = desc->sg; i < 2 && sg; i++) { in ipu_submit_channel_buffers()
808 if (!ichan->sg[i]) { in ipu_submit_channel_buffers()
809 ichan->sg[i] = sg; in ipu_submit_channel_buffers()
811 ret = ipu_submit_buffer(ichan, desc, sg, i); in ipu_submit_channel_buffers()
815 sg = sg_next(sg); in ipu_submit_channel_buffers()
850 dma_addr_t dma_1 = sg_is_last(desc->sg) ? 0 : in idmac_tx_submit()
[all …]
/linux-4.4.14/include/linux/platform_data/
Ddma-ste-dma40.h184 struct scatterlist sg; in stedma40_slave_mem() local
185 sg_init_table(&sg, 1); in stedma40_slave_mem()
186 sg.dma_address = addr; in stedma40_slave_mem()
187 sg.length = size; in stedma40_slave_mem()
189 return dmaengine_prep_slave_sg(chan, &sg, 1, direction, flags); in stedma40_slave_mem()
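
stedma40_slave_mem() wraps a bare dma_addr_t in a throwaway one-entry list just to reach dmaengine_prep_slave_sg(). The general submission sequence it shortcuts looks roughly like this sketch:

    #include <linux/dmaengine.h>

    /* Queue a mapped sg list on a dmaengine slave channel. */
    static int submit_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
                               unsigned int nents,
                               enum dma_transfer_direction dir)
    {
            struct dma_async_tx_descriptor *desc;
            dma_cookie_t cookie;

            desc = dmaengine_prep_slave_sg(chan, sgl, nents, dir,
                                           DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
            if (!desc)
                    return -EINVAL;

            cookie = dmaengine_submit(desc);
            if (dma_submit_error(cookie))
                    return -EIO;

            dma_async_issue_pending(chan);
            return 0;
    }
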
/linux-4.4.14/drivers/infiniband/ulp/iser/
Diser_memory.c153 struct scatterlist *sg, *sgl = data->sg; in iser_sg_to_page_vec() local
164 for_each_sg(sgl, sg, data->dma_nents, i) { in iser_sg_to_page_vec()
165 start_addr = ib_sg_dma_address(ibdev, sg); in iser_sg_to_page_vec()
168 dma_len = ib_sg_dma_len(ibdev, sg); in iser_sg_to_page_vec()
198 struct scatterlist *sg; in iser_data_buf_dump() local
201 for_each_sg(data->sg, sg, data->dma_nents, i) in iser_data_buf_dump()
204 i, (unsigned long)ib_sg_dma_address(ibdev, sg), in iser_data_buf_dump()
205 sg_page(sg), sg->offset, in iser_data_buf_dump()
206 sg->length, ib_sg_dma_len(ibdev, sg)); in iser_data_buf_dump()
229 data->dma_nents = ib_dma_map_sg(dev, data->sg, data->size, dma_dir); in iser_dma_map_task_data()
[all …]
/linux-4.4.14/drivers/virtio/
Dvirtio_ring.c138 struct scatterlist *sg; in virtqueue_add() local
213 for (sg = sgs[n]; sg; sg = sg_next(sg)) { in virtqueue_add()
215 desc[i].addr = cpu_to_virtio64(_vq->vdev, sg_phys(sg)); in virtqueue_add()
216 desc[i].len = cpu_to_virtio32(_vq->vdev, sg->length); in virtqueue_add()
222 for (sg = sgs[n]; sg; sg = sg_next(sg)) { in virtqueue_add()
224 desc[i].addr = cpu_to_virtio64(_vq->vdev, sg_phys(sg)); in virtqueue_add()
225 desc[i].len = cpu_to_virtio32(_vq->vdev, sg->length); in virtqueue_add()
290 struct scatterlist *sg; in virtqueue_add_sgs() local
291 for (sg = sgs[i]; sg; sg = sg_next(sg)) in virtqueue_add_sgs()
312 struct scatterlist *sg, unsigned int num, in virtqueue_add_outbuf() argument
[all …]
Dvirtio_balloon.c115 struct scatterlist sg; in tell_host() local
118 sg_init_one(&sg, vb->pfns, sizeof(vb->pfns[0]) * vb->num_pfns); in tell_host()
121 virtqueue_add_outbuf(vq, &sg, 1, vb, GFP_KERNEL); in tell_host()
267 struct scatterlist sg; in stats_handle_request() local
276 sg_init_one(&sg, vb->stats, sizeof(vb->stats)); in stats_handle_request()
277 virtqueue_add_outbuf(vq, &sg, 1, vb, GFP_KERNEL); in stats_handle_request()
406 struct scatterlist sg; in init_vqs() local
413 sg_init_one(&sg, vb->stats, sizeof vb->stats); in init_vqs()
414 if (virtqueue_add_outbuf(vb->stats_vq, &sg, 1, vb, GFP_KERNEL) in init_vqs()
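
tell_host() and stats_handle_request() show the minimal virtio transmit shape: one sg entry, add, kick, reap later. As a standalone sketch:

    #include <linux/scatterlist.h>
    #include <linux/virtio.h>

    /* Expose one buffer to the host and notify it; 'token' comes back
     * from virtqueue_get_buf() when the host is done with it. */
    static int vq_send_one(struct virtqueue *vq, void *buf,
                           unsigned int len, void *token)
    {
            struct scatterlist sg;
            int err;

            sg_init_one(&sg, buf, len);
            err = virtqueue_add_outbuf(vq, &sg, 1, token, GFP_KERNEL);
            if (err)
                    return err;

            virtqueue_kick(vq);
            return 0;
    }
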
/linux-4.4.14/drivers/media/pci/cx25821/
Dcx25821-core.c1011 struct scatterlist *sg; in cx25821_risc_field() local
1025 sg = sglist; in cx25821_risc_field()
1027 while (offset && offset >= sg_dma_len(sg)) { in cx25821_risc_field()
1028 offset -= sg_dma_len(sg); in cx25821_risc_field()
1029 sg = sg_next(sg); in cx25821_risc_field()
1031 if (bpl <= sg_dma_len(sg) - offset) { in cx25821_risc_field()
1035 *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset); in cx25821_risc_field()
1042 (sg_dma_len(sg) - offset)); in cx25821_risc_field()
1043 *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset); in cx25821_risc_field()
1045 todo -= (sg_dma_len(sg) - offset); in cx25821_risc_field()
[all …]
/linux-4.4.14/drivers/infiniband/hw/mlx5/
Dmem.c57 struct scatterlist *sg; in mlx5_ib_cont_pages() local
78 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { in mlx5_ib_cont_pages()
79 len = sg_dma_len(sg) >> page_shift; in mlx5_ib_cont_pages()
80 pfn = sg_dma_address(sg) >> page_shift; in mlx5_ib_cont_pages()
161 struct scatterlist *sg; in __mlx5_ib_populate_pas() local
180 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { in __mlx5_ib_populate_pas()
181 len = sg_dma_len(sg) >> umem_page_shift; in __mlx5_ib_populate_pas()
182 base = sg_dma_address(sg); in __mlx5_ib_populate_pas()
/linux-4.4.14/drivers/infiniband/core/
Dumem.c49 struct scatterlist *sg; in __ib_umem_release() local
58 for_each_sg(umem->sg_head.sgl, sg, umem->npages, i) { in __ib_umem_release()
60 page = sg_page(sg); in __ib_umem_release()
96 struct scatterlist *sg, *sg_list_start; in ib_umem_get() local
203 for_each_sg(sg_list_start, sg, ret, i) { in ib_umem_get()
207 sg_set_page(sg, page_list[i], PAGE_SIZE, 0); in ib_umem_get()
211 sg_list_start = sg; in ib_umem_get()
318 struct scatterlist *sg; in ib_umem_page_count() local
326 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, i) in ib_umem_page_count()
327 n += sg_dma_len(sg) >> shift; in ib_umem_page_count()
/linux-4.4.14/arch/sh/kernel/
Ddma-nommu.c26 static int nommu_map_sg(struct device *dev, struct scatterlist *sg, in nommu_map_sg() argument
33 WARN_ON(nents == 0 || sg[0].length == 0); in nommu_map_sg()
35 for_each_sg(sg, s, nents, i) { in nommu_map_sg()
54 static void nommu_sync_sg(struct device *dev, struct scatterlist *sg, in nommu_sync_sg() argument
60 for_each_sg(sg, s, nelems, i) in nommu_sync_sg()
/linux-4.4.14/drivers/net/ethernet/micrel/
Dks8842.c146 struct scatterlist sg; member
154 struct scatterlist sg; member
443 sg_dma_len(&ctl->sg) = skb->len + sizeof(u32); in ks8842_tx_frame_dma()
454 sg_dma_address(&ctl->sg), 0, sg_dma_len(&ctl->sg), in ks8842_tx_frame_dma()
458 if (sg_dma_len(&ctl->sg) % 4) in ks8842_tx_frame_dma()
459 sg_dma_len(&ctl->sg) += 4 - sg_dma_len(&ctl->sg) % 4; in ks8842_tx_frame_dma()
462 &ctl->sg, 1, DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT); in ks8842_tx_frame_dma()
556 struct scatterlist *sg = &ctl->sg; in __ks8842_start_new_rx_dma() local
561 sg_init_table(sg, 1); in __ks8842_start_new_rx_dma()
562 sg_dma_address(sg) = dma_map_single(adapter->dev, in __ks8842_start_new_rx_dma()
[all …]
/linux-4.4.14/drivers/gpu/drm/i915/
Di915_gem_dmabuf.c93 struct sg_table *sg, in i915_gem_unmap_dma_buf() argument
100 dma_unmap_sg(attachment->dev, sg->sgl, sg->nents, dir); in i915_gem_unmap_dma_buf()
101 sg_free_table(sg); in i915_gem_unmap_dma_buf()
102 kfree(sg); in i915_gem_unmap_dma_buf()
252 struct sg_table *sg; in i915_gem_object_get_pages_dmabuf() local
254 sg = dma_buf_map_attachment(obj->base.import_attach, DMA_BIDIRECTIONAL); in i915_gem_object_get_pages_dmabuf()
255 if (IS_ERR(sg)) in i915_gem_object_get_pages_dmabuf()
256 return PTR_ERR(sg); in i915_gem_object_get_pages_dmabuf()
258 obj->pages = sg; in i915_gem_object_get_pages_dmabuf()
/linux-4.4.14/drivers/scsi/libfc/
Dfc_libfc.c112 struct scatterlist *sg, in fc_copy_buffer_to_sglist() argument
119 while (remaining > 0 && sg) { in fc_copy_buffer_to_sglist()
123 if (*offset >= sg->length) { in fc_copy_buffer_to_sglist()
131 *offset -= sg->length; in fc_copy_buffer_to_sglist()
132 sg = sg_next(sg); in fc_copy_buffer_to_sglist()
135 sg_bytes = min(remaining, sg->length - *offset); in fc_copy_buffer_to_sglist()
141 off = *offset + sg->offset; in fc_copy_buffer_to_sglist()
144 page_addr = kmap_atomic(sg_page(sg) + (off >> PAGE_SHIFT)); in fc_copy_buffer_to_sglist()
/linux-4.4.14/arch/xtensa/kernel/
Dpci-dma.c113 struct scatterlist *sg, int nents, in xtensa_sync_sg_for_cpu() argument
119 for_each_sg(sg, s, nents, i) { in xtensa_sync_sg_for_cpu()
126 struct scatterlist *sg, int nents, in xtensa_sync_sg_for_device() argument
132 for_each_sg(sg, s, nents, i) { in xtensa_sync_sg_for_device()
203 static int xtensa_map_sg(struct device *dev, struct scatterlist *sg, in xtensa_map_sg() argument
210 for_each_sg(sg, s, nents, i) { in xtensa_map_sg()
218 struct scatterlist *sg, int nents, in xtensa_unmap_sg() argument
225 for_each_sg(sg, s, nents, i) { in xtensa_unmap_sg()
/linux-4.4.14/drivers/gpu/drm/
Ddrm_scatter.c70 if (drm_core_check_feature(dev, DRIVER_SG) && dev->sg && in drm_legacy_sg_cleanup()
72 drm_sg_cleanup(dev->sg); in drm_legacy_sg_cleanup()
73 dev->sg = NULL; in drm_legacy_sg_cleanup()
97 if (dev->sg) in drm_legacy_sg_alloc()
149 dev->sg = entry; in drm_legacy_sg_alloc()
210 entry = dev->sg; in drm_legacy_sg_free()
211 dev->sg = NULL; in drm_legacy_sg_free()
Ddrm_prime.c680 struct sg_table *sg = NULL; in drm_prime_pages_to_sg() local
683 sg = kmalloc(sizeof(struct sg_table), GFP_KERNEL); in drm_prime_pages_to_sg()
684 if (!sg) { in drm_prime_pages_to_sg()
689 ret = sg_alloc_table_from_pages(sg, pages, nr_pages, 0, in drm_prime_pages_to_sg()
694 return sg; in drm_prime_pages_to_sg()
696 kfree(sg); in drm_prime_pages_to_sg()
715 struct scatterlist *sg; in drm_prime_sg_to_page_addr_arrays() local
722 for_each_sg(sgt->sgl, sg, sgt->nents, count) { in drm_prime_sg_to_page_addr_arrays()
723 len = sg->length; in drm_prime_sg_to_page_addr_arrays()
724 page = sg_page(sg); in drm_prime_sg_to_page_addr_arrays()
[all …]
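
drm_prime_pages_to_sg() is essentially an allocation plus one sg_alloc_table_from_pages() call, which also merges physically adjacent pages into single entries. A hedged re-sketch of that core:

    #include <linux/err.h>
    #include <linux/scatterlist.h>
    #include <linux/slab.h>

    /* Build an sg table over an existing page array. */
    static struct sg_table *pages_to_sgt(struct page **pages,
                                         unsigned int nr_pages)
    {
            struct sg_table *sgt;
            int ret;

            sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
            if (!sgt)
                    return ERR_PTR(-ENOMEM);

            ret = sg_alloc_table_from_pages(sgt, pages, nr_pages, 0,
                                            (unsigned long)nr_pages << PAGE_SHIFT,
                                            GFP_KERNEL);
            if (ret) {
                    kfree(sgt);
                    return ERR_PTR(ret);
            }
            return sgt;
    }
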
/linux-4.4.14/arch/powerpc/platforms/powernv/
Dopal.c807 struct opal_sg_list *sg, *first = NULL; in opal_vmalloc_to_sg_list() local
810 sg = kzalloc(PAGE_SIZE, GFP_KERNEL); in opal_vmalloc_to_sg_list()
811 if (!sg) in opal_vmalloc_to_sg_list()
814 first = sg; in opal_vmalloc_to_sg_list()
820 sg->entry[i].data = cpu_to_be64(data); in opal_vmalloc_to_sg_list()
821 sg->entry[i].length = cpu_to_be64(length); in opal_vmalloc_to_sg_list()
831 sg->length = cpu_to_be64( in opal_vmalloc_to_sg_list()
834 sg->next = cpu_to_be64(__pa(next)); in opal_vmalloc_to_sg_list()
835 sg = next; in opal_vmalloc_to_sg_list()
842 sg->length = cpu_to_be64(i * sizeof(struct opal_sg_entry) + 16); in opal_vmalloc_to_sg_list()
[all …]
/linux-4.4.14/drivers/dma/sh/
Dusb-dmac.c63 struct usb_dmac_sg sg[0]; member
200 struct usb_dmac_sg *sg = desc->sg + index; in usb_dmac_chan_start_sg() local
206 dst_addr = sg->mem_addr; in usb_dmac_chan_start_sg()
208 src_addr = sg->mem_addr; in usb_dmac_chan_start_sg()
212 chan->index, sg, sg->size, &src_addr, &dst_addr); in usb_dmac_chan_start_sg()
217 DIV_ROUND_UP(sg->size, USB_DMAC_XFER_SIZE)); in usb_dmac_chan_start_sg()
218 usb_dmac_chan_write(chan, USB_DMATEND, usb_dmac_calc_tend(sg->size)); in usb_dmac_chan_start_sg()
272 desc = kzalloc(sizeof(*desc) + sg_len * sizeof(desc->sg[0]), gfp); in usb_dmac_desc_alloc()
425 struct scatterlist *sg; in usb_dmac_prep_slave_sg() local
440 for_each_sg(sgl, sg, sg_len, i) { in usb_dmac_prep_slave_sg()
[all …]
/linux-4.4.14/drivers/staging/rtl8192u/ieee80211/
Dieee80211_crypt_wep.c97 struct scatterlist sg; in prism2_wep_encrypt() local
141 sg_init_one(&sg, pos, len+4); in prism2_wep_encrypt()
143 return crypto_blkcipher_encrypt(&desc, &sg, &sg, len + 4); in prism2_wep_encrypt()
167 struct scatterlist sg; in prism2_wep_decrypt() local
190 sg_init_one(&sg, pos, plen+4); in prism2_wep_decrypt()
192 if (crypto_blkcipher_decrypt(&desc, &sg, &sg, plen + 4)) in prism2_wep_decrypt()
Dieee80211_crypt_tkip.c319 struct scatterlist sg; in ieee80211_tkip_encrypt() local
367 sg_init_one(&sg, pos, len+4); in ieee80211_tkip_encrypt()
368 ret = crypto_blkcipher_encrypt(&desc, &sg, &sg, len + 4); in ieee80211_tkip_encrypt()
397 struct scatterlist sg; in ieee80211_tkip_decrypt() local
453 sg_init_one(&sg, pos, plen+4); in ieee80211_tkip_decrypt()
455 if (crypto_blkcipher_decrypt(&desc, &sg, &sg, plen + 4)) { in ieee80211_tkip_decrypt()
508 struct scatterlist sg[2]; in michael_mic() local
515 sg_init_table(sg, 2); in michael_mic()
516 sg_set_buf(&sg[0], hdr, 16); in michael_mic()
517 sg_set_buf(&sg[1], data, data_len); in michael_mic()
[all …]
/linux-4.4.14/drivers/memstick/core/
Dms_block.c98 static int msb_sg_compare_to_buffer(struct scatterlist *sg, in msb_sg_compare_to_buffer() argument
104 sg_miter_start(&miter, sg, sg_nents(sg), in msb_sg_compare_to_buffer()
344 struct scatterlist sg[2]; in h_msb_read_page() local
434 sg_init_table(sg, ARRAY_SIZE(sg)); in h_msb_read_page()
435 msb_sg_copy(msb->current_sg, sg, ARRAY_SIZE(sg), in h_msb_read_page()
439 memstick_init_req_sg(mrq, MS_TPC_READ_LONG_DATA, sg); in h_msb_read_page()
480 struct scatterlist sg[2]; in h_msb_write_block() local
559 sg_init_table(sg, ARRAY_SIZE(sg)); in h_msb_write_block()
561 if (msb_sg_copy(msb->current_sg, sg, ARRAY_SIZE(sg), in h_msb_write_block()
566 memstick_init_req_sg(mrq, MS_TPC_WRITE_LONG_DATA, sg); in h_msb_write_block()
[all …]
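
msb_sg_compare_to_buffer() leans on the sg_mapping_iter helpers, which hide the kmap/offset bookkeeping that the manual walks elsewhere in this listing do by hand. The basic loop shape (sketch):

    #include <linux/scatterlist.h>

    /* Visit every mapped chunk of an sg list; with SG_MITER_ATOMIC
     * the caller must not sleep between next() and stop(). */
    static size_t walk_sg(struct scatterlist *sgl, unsigned int nents)
    {
            struct sg_mapping_iter miter;
            size_t total = 0;

            sg_miter_start(&miter, sgl, nents,
                           SG_MITER_ATOMIC | SG_MITER_FROM_SG);
            while (sg_miter_next(&miter)) {
                    /* miter.addr points at miter.length mapped bytes */
                    total += miter.length;
            }
            sg_miter_stop(&miter);

            return total;
    }
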
/linux-4.4.14/arch/arm64/mm/
Ddma-mapping.c230 struct scatterlist *sg; in __swiotlb_map_sg_attrs() local
235 for_each_sg(sgl, sg, ret, i) in __swiotlb_map_sg_attrs()
236 __dma_map_area(phys_to_virt(dma_to_phys(dev, sg->dma_address)), in __swiotlb_map_sg_attrs()
237 sg->length, dir); in __swiotlb_map_sg_attrs()
247 struct scatterlist *sg; in __swiotlb_unmap_sg_attrs() local
251 for_each_sg(sgl, sg, nelems, i) in __swiotlb_unmap_sg_attrs()
252 __dma_unmap_area(phys_to_virt(dma_to_phys(dev, sg->dma_address)), in __swiotlb_unmap_sg_attrs()
253 sg->length, dir); in __swiotlb_unmap_sg_attrs()
279 struct scatterlist *sg; in __swiotlb_sync_sg_for_cpu() local
283 for_each_sg(sgl, sg, nelems, i) in __swiotlb_sync_sg_for_cpu()
[all …]
/linux-4.4.14/arch/h8300/kernel/
Ddma.c53 struct scatterlist *sg; in map_sg() local
56 for_each_sg(sgl, sg, nents, i) { in map_sg()
57 sg->dma_address = sg_phys(sg); in map_sg()
/linux-4.4.14/drivers/staging/rtl8192e/
Drtllib_crypt_wep.c105 struct scatterlist sg; in prism2_wep_encrypt() local
152 sg_init_one(&sg, pos, len+4); in prism2_wep_encrypt()
154 return crypto_blkcipher_encrypt(&desc, &sg, &sg, len + 4); in prism2_wep_encrypt()
179 struct scatterlist sg; in prism2_wep_decrypt() local
201 sg_init_one(&sg, pos, plen+4); in prism2_wep_decrypt()
203 if (crypto_blkcipher_decrypt(&desc, &sg, &sg, plen + 4)) in prism2_wep_decrypt()
Drtllib_crypt_tkip.c308 struct scatterlist sg; in rtllib_tkip_encrypt() local
357 sg_init_one(&sg, pos, len+4); in rtllib_tkip_encrypt()
361 ret = crypto_blkcipher_encrypt(&desc, &sg, &sg, len + 4); in rtllib_tkip_encrypt()
391 struct scatterlist sg; in rtllib_tkip_decrypt() local
451 sg_init_one(&sg, pos, plen+4); in rtllib_tkip_decrypt()
454 if (crypto_blkcipher_decrypt(&desc, &sg, &sg, plen + 4)) { in rtllib_tkip_decrypt()
507 struct scatterlist sg[2]; in michael_mic() local
513 sg_init_table(sg, 2); in michael_mic()
514 sg_set_buf(&sg[0], hdr, 16); in michael_mic()
515 sg_set_buf(&sg[1], data, data_len); in michael_mic()
[all …]
/linux-4.4.14/drivers/media/pci/ivtv/
Divtv-udma.c77 struct scatterlist *sg; in ivtv_udma_fill_sg_array() local
79 for (i = 0, sg = dma->SGlist; i < dma->SG_length; i++, sg = sg_next(sg)) { in ivtv_udma_fill_sg_array()
80 dma->SGarray[i].size = cpu_to_le32(sg_dma_len(sg)); in ivtv_udma_fill_sg_array()
81 dma->SGarray[i].src = cpu_to_le32(sg_dma_address(sg)); in ivtv_udma_fill_sg_array()
83 buffer_offset += sg_dma_len(sg); in ivtv_udma_fill_sg_array()
85 split -= sg_dma_len(sg); in ivtv_udma_fill_sg_array()
/linux-4.4.14/net/wireless/
Dlib80211_crypt_wep.c139 struct scatterlist sg; in lib80211_wep_encrypt() local
169 sg_init_one(&sg, pos, len + 4); in lib80211_wep_encrypt()
170 return crypto_blkcipher_encrypt(&desc, &sg, &sg, len + 4); in lib80211_wep_encrypt()
187 struct scatterlist sg; in lib80211_wep_decrypt() local
209 sg_init_one(&sg, pos, plen + 4); in lib80211_wep_decrypt()
210 if (crypto_blkcipher_decrypt(&desc, &sg, &sg, plen + 4)) in lib80211_wep_decrypt()
Dlib80211_crypt_tkip.c360 struct scatterlist sg; in lib80211_tkip_encrypt() local
386 sg_init_one(&sg, pos, len + 4); in lib80211_tkip_encrypt()
387 return crypto_blkcipher_encrypt(&desc, &sg, &sg, len + 4); in lib80211_tkip_encrypt()
414 struct scatterlist sg; in lib80211_tkip_decrypt() local
469 sg_init_one(&sg, pos, plen + 4); in lib80211_tkip_decrypt()
470 if (crypto_blkcipher_decrypt(&desc, &sg, &sg, plen + 4)) { in lib80211_tkip_decrypt()
512 struct scatterlist sg[2]; in michael_mic() local
518 sg_init_table(sg, 2); in michael_mic()
519 sg_set_buf(&sg[0], hdr, 16); in michael_mic()
520 sg_set_buf(&sg[1], data, data_len); in michael_mic()
[all …]
/linux-4.4.14/drivers/dma/hsu/
Dhsu.c82 hsu_chan_writel(hsuc, HSU_CH_DxSAR(i), desc->sg[i].addr); in hsu_dma_chan_start()
83 hsu_chan_writel(hsuc, HSU_CH_DxTSR(i), desc->sg[i].len); in hsu_dma_chan_start()
197 desc->sg = kcalloc(nents, sizeof(*desc->sg), GFP_NOWAIT); in hsu_dma_alloc_desc()
198 if (!desc->sg) { in hsu_dma_alloc_desc()
210 kfree(desc->sg); in hsu_dma_desc_free()
221 struct scatterlist *sg; in hsu_dma_prep_slave_sg() local
228 for_each_sg(sgl, sg, sg_len, i) { in hsu_dma_prep_slave_sg()
229 desc->sg[i].addr = sg_dma_address(sg); in hsu_dma_prep_slave_sg()
230 desc->sg[i].len = sg_dma_len(sg); in hsu_dma_prep_slave_sg()
258 bytes += desc->sg[i].len; in hsu_dma_desc_size()
/linux-4.4.14/net/sunrpc/xprtrdma/
Dfrwr_ops.c155 f->sg = kcalloc(depth, sizeof(*f->sg), GFP_KERNEL); in __frwr_init()
156 if (!f->sg) in __frwr_init()
159 sg_init_table(f->sg, depth); in __frwr_init()
186 kfree(r->r.frmr.sg); in __frwr_release()
345 sg_set_page(&frmr->sg[i], in frwr_op_map()
350 sg_set_buf(&frmr->sg[i], seg->mr_offset, in frwr_op_map()
363 dma_nents = ib_dma_map_sg(device, frmr->sg, frmr->sg_nents, direction); in frwr_op_map()
366 __func__, frmr->sg, frmr->sg_nents); in frwr_op_map()
370 n = ib_map_mr_sg(mr, frmr->sg, frmr->sg_nents, PAGE_SIZE); in frwr_op_map()
411 ib_dma_unmap_sg(device, frmr->sg, dma_nents, direction); in frwr_op_map()
[all …]
/linux-4.4.14/drivers/rapidio/devices/
Dtsi721_dma.c338 struct scatterlist *sg, u32 sys_size) in tsi721_desc_fill_init() argument
355 (u64)sg_dma_address(sg) & 0xffffffff); in tsi721_desc_fill_init()
356 bd_ptr->t1.bufptr_hi = cpu_to_le32((u64)sg_dma_address(sg) >> 32); in tsi721_desc_fill_init()
422 struct scatterlist *sg; in tsi721_submit_sg() local
457 for_each_sg(desc->sg, sg, desc->sg_len, i) { in tsi721_submit_sg()
461 (unsigned long long)sg_dma_address(sg), sg_dma_len(sg)); in tsi721_submit_sg()
463 if (sg_dma_len(sg) > TSI721_BDMA_MAX_BCOUNT) { in tsi721_submit_sg()
474 if (next_addr == sg_dma_address(sg) && in tsi721_submit_sg()
475 bcount + sg_dma_len(sg) <= TSI721_BDMA_MAX_BCOUNT) { in tsi721_submit_sg()
477 bcount += sg_dma_len(sg); in tsi721_submit_sg()
[all …]
/linux-4.4.14/drivers/usb/host/whci/
Dqset.c266 struct scatterlist *sg; in qset_copy_bounce_to_sg() local
273 sg = std->bounce_sg; in qset_copy_bounce_to_sg()
279 len = min(sg->length - offset, remaining); in qset_copy_bounce_to_sg()
280 memcpy(sg_virt(sg) + offset, bounce, len); in qset_copy_bounce_to_sg()
286 if (offset >= sg->length) { in qset_copy_bounce_to_sg()
287 sg = sg_next(sg); in qset_copy_bounce_to_sg()
439 struct scatterlist *sg; in qset_add_urb_sg() local
450 for_each_sg(urb->sg, sg, urb->num_mapped_sgs, i) { in qset_add_urb_sg()
460 dma_addr = sg_dma_address(sg); in qset_add_urb_sg()
461 dma_remaining = min_t(size_t, sg_dma_len(sg), remaining); in qset_add_urb_sg()
[all …]
/linux-4.4.14/arch/x86/kernel/
Dpci-nommu.c56 static int nommu_map_sg(struct device *hwdev, struct scatterlist *sg, in nommu_map_sg() argument
63 WARN_ON(nents == 0 || sg[0].length == 0); in nommu_map_sg()
65 for_each_sg(sg, s, nents, i) { in nommu_map_sg()
85 struct scatterlist *sg, int nelems, in nommu_sync_sg_for_device() argument
Damd_gart_64.c288 static void gart_unmap_sg(struct device *dev, struct scatterlist *sg, int nents, in gart_unmap_sg() argument
294 for_each_sg(sg, s, nents, i) { in gart_unmap_sg()
302 static int dma_map_sg_nonforce(struct device *dev, struct scatterlist *sg, in dma_map_sg_nonforce() argument
312 for_each_sg(sg, s, nents, i) { in dma_map_sg_nonforce()
319 gart_unmap_sg(dev, sg, i, dir, NULL); in dma_map_sg_nonforce()
321 sg[0].dma_length = 0; in dma_map_sg_nonforce()
389 static int gart_map_sg(struct device *dev, struct scatterlist *sg, int nents, in gart_map_sg() argument
406 start_sg = sg; in gart_map_sg()
407 sgmap = sg; in gart_map_sg()
412 for_each_sg(sg, s, nents, i) { in gart_map_sg()
[all …]
/linux-4.4.14/drivers/infiniband/hw/mthca/
Dmthca_wqe.h117 struct ib_sge *sg) in mthca_set_data_seg() argument
119 dseg->byte_count = cpu_to_be32(sg->length); in mthca_set_data_seg()
120 dseg->lkey = cpu_to_be32(sg->lkey); in mthca_set_data_seg()
121 dseg->addr = cpu_to_be64(sg->addr); in mthca_set_data_seg()
/linux-4.4.14/arch/frv/mb93090-mb00/
Dpci-dma-nommu.c126 struct scatterlist *sg; in dma_map_sg() local
128 for_each_sg(sglist, sg, nents, i) { in dma_map_sg()
129 frv_cache_wback_inv(sg_dma_address(sg), in dma_map_sg()
130 sg_dma_address(sg) + sg_dma_len(sg)); in dma_map_sg()
Dpci-dma.c59 struct scatterlist *sg; in dma_map_sg() local
65 for_each_sg(sglist, sg, nents, i) { in dma_map_sg()
66 vaddr = kmap_atomic_primary(sg_page(sg)); in dma_map_sg()
/linux-4.4.14/drivers/crypto/marvell/
Dtdma.c22 if (!sgiter->sg) in mv_cesa_req_dma_iter_next_transfer()
27 if (sgiter->offset == sg_dma_len(sgiter->sg)) { in mv_cesa_req_dma_iter_next_transfer()
28 if (sg_is_last(sgiter->sg)) in mv_cesa_req_dma_iter_next_transfer()
31 sgiter->sg = sg_next(sgiter->sg); in mv_cesa_req_dma_iter_next_transfer()
212 src = sg_dma_address(sgiter->sg) + sgiter->offset; in mv_cesa_dma_add_op_transfers()
214 dst = sg_dma_address(sgiter->sg) + sgiter->offset; in mv_cesa_dma_add_op_transfers()
/linux-4.4.14/drivers/net/
Dvirtio_net.c71 struct scatterlist sg[MAX_SKB_FRAGS + 2]; member
94 struct scatterlist sg[MAX_SKB_FRAGS + 2]; member
549 sg_init_table(rq->sg, 2); in add_recvbuf_small()
550 sg_set_buf(rq->sg, hdr, vi->hdr_len); in add_recvbuf_small()
551 skb_to_sgvec(skb, rq->sg + 1, 0, skb->len); in add_recvbuf_small()
553 err = virtqueue_add_inbuf(rq->vq, rq->sg, 2, skb, gfp); in add_recvbuf_small()
567 sg_init_table(rq->sg, MAX_SKB_FRAGS + 2); in add_recvbuf_big()
577 sg_set_buf(&rq->sg[i], page_address(first), PAGE_SIZE); in add_recvbuf_big()
593 sg_set_buf(&rq->sg[0], p, vi->hdr_len); in add_recvbuf_big()
597 sg_set_buf(&rq->sg[1], p + offset, PAGE_SIZE - offset); in add_recvbuf_big()
[all …]
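
The xmit and recv paths above share one shape: driver header in sg[0], skb data scattered behind it by skb_to_sgvec() (which, in this kernel, also terminates the list), then a single virtqueue_add_outbuf(). Condensed sketch:

    #include <linux/scatterlist.h>
    #include <linux/skbuff.h>
    #include <linux/virtio.h>

    /* Queue an skb with a prepended header, zero-copy. */
    static int xmit_skb_sketch(struct virtqueue *vq, struct scatterlist *sg,
                               void *hdr, unsigned int hdr_len,
                               struct sk_buff *skb)
    {
            int n;

            sg_init_table(sg, MAX_SKB_FRAGS + 2);
            sg_set_buf(sg, hdr, hdr_len);
            n = skb_to_sgvec(skb, sg + 1, 0, skb->len);

            return virtqueue_add_outbuf(vq, sg, n + 1, skb, GFP_ATOMIC);
    }
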
/linux-4.4.14/drivers/usb/wusbcore/
Dcrypto.c209 struct scatterlist sg[4], sg_dst; in wusb_ccm_mac() local
254 sg_init_table(sg, ARRAY_SIZE(sg)); in wusb_ccm_mac()
255 sg_set_buf(&sg[0], &b0, sizeof(b0)); in wusb_ccm_mac()
256 sg_set_buf(&sg[1], &b1, sizeof(b1)); in wusb_ccm_mac()
257 sg_set_buf(&sg[2], b, blen); in wusb_ccm_mac()
259 sg_set_buf(&sg[3], bzero, zero_padding); in wusb_ccm_mac()
264 result = crypto_blkcipher_encrypt(&desc, &sg_dst, sg, dst_size); in wusb_ccm_mac()
/linux-4.4.14/drivers/iommu/
Ddma-iommu.c394 static int __finalise_sg(struct device *dev, struct scatterlist *sg, int nents, in __finalise_sg() argument
400 for_each_sg(sg, s, nents, i) { in __finalise_sg()
418 static void __invalidate_sg(struct scatterlist *sg, int nents) in __invalidate_sg() argument
423 for_each_sg(sg, s, nents, i) { in __invalidate_sg()
440 int iommu_dma_map_sg(struct device *dev, struct scatterlist *sg, in iommu_dma_map_sg() argument
457 for_each_sg(sg, s, nents, i) { in iommu_dma_map_sg()
494 if (iommu_map_sg(domain, dma_addr, sg, nents, prot) < iova_len) in iommu_dma_map_sg()
497 return __finalise_sg(dev, sg, nents, dma_addr); in iommu_dma_map_sg()
502 __invalidate_sg(sg, nents); in iommu_dma_map_sg()
506 void iommu_dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nents, in iommu_dma_unmap_sg() argument
[all …]
/linux-4.4.14/drivers/staging/rts5208/
Drtsx_transport.c69 struct scatterlist *sg = in rtsx_stor_access_xfer_buf() local
79 struct page *page = sg_page(sg) + in rtsx_stor_access_xfer_buf()
80 ((sg->offset + *offset) >> PAGE_SHIFT); in rtsx_stor_access_xfer_buf()
82 (sg->offset + *offset) & (PAGE_SIZE-1); in rtsx_stor_access_xfer_buf()
83 unsigned int sglen = sg->length - *offset; in rtsx_stor_access_xfer_buf()
95 ++sg; in rtsx_stor_access_xfer_buf()
324 struct scatterlist *sg, int num_sg, unsigned int *index, in rtsx_transfer_sglist_adma_partial() argument
337 if ((sg == NULL) || (num_sg <= 0) || !offset || !index) in rtsx_transfer_sglist_adma_partial()
366 sg_cnt = dma_map_sg(&(rtsx->pci->dev), sg, num_sg, dma_dir); in rtsx_transfer_sglist_adma_partial()
369 sg_ptr = sg; in rtsx_transfer_sglist_adma_partial()
[all …]
/linux-4.4.14/drivers/crypto/nx/
Dnx.c97 struct nx_sg *sg; in nx_build_sg_list() local
121 for (sg = sg_head; sg_len < *len; sg++) { in nx_build_sg_list()
124 sg->addr = sg_addr; in nx_build_sg_list()
128 next_page = (sg->addr & PAGE_MASK) + PAGE_SIZE; in nx_build_sg_list()
129 sg->len = min_t(u64, sg_addr, next_page) - sg->addr; in nx_build_sg_list()
130 sg_len += sg->len; in nx_build_sg_list()
139 if ((sg - sg_head) == sgmax) { in nx_build_sg_list()
142 sg++; in nx_build_sg_list()
149 return sg; in nx_build_sg_list()
192 scatterwalk_start(&walk, sg_next(walk.sg)); in nx_walk_and_build()
[all …]
/linux-4.4.14/arch/arm/mach-ks8695/
DMakefile15 obj-$(CONFIG_MACH_LITE300) += board-sg.o
16 obj-$(CONFIG_MACH_SG310) += board-sg.o
17 obj-$(CONFIG_MACH_SE4200) += board-sg.o
/linux-4.4.14/arch/sparc/include/asm/
Ddma.h112 #define mmu_get_scsi_sgl(dev,sg,sz) \ argument
113 sparc32_dma_ops->get_scsi_sgl(dev, sg, sz)
116 #define mmu_release_scsi_sgl(dev,sg,sz) \ argument
117 sparc32_dma_ops->release_scsi_sgl(dev, sg, sz)
/linux-4.4.14/drivers/i2c/busses/
Di2c-at91.c119 struct scatterlist sg[2]; member
225 dma_unmap_single(dev->dev, sg_dma_address(&dma->sg[0]), in at91_twi_dma_cleanup()
255 dma_unmap_single(dev->dev, sg_dma_address(&dev->dma.sg[0]), in at91_twi_write_data_dma_callback()
295 struct scatterlist *sg; in at91_twi_write_data_dma() local
302 sg = &dma->sg[sg_len++]; in at91_twi_write_data_dma()
303 sg_dma_len(sg) = part1_len; in at91_twi_write_data_dma()
304 sg_dma_address(sg) = dma_addr; in at91_twi_write_data_dma()
309 sg = &dma->sg[sg_len++]; in at91_twi_write_data_dma()
310 sg_dma_len(sg) = part2_len; in at91_twi_write_data_dma()
311 sg_dma_address(sg) = dma_addr + part1_len; in at91_twi_write_data_dma()
[all …]
/linux-4.4.14/drivers/media/pci/cx88/
Dcx88-core.c81 struct scatterlist *sg; in cx88_risc_field() local
94 sg = sglist; in cx88_risc_field()
96 while (offset && offset >= sg_dma_len(sg)) { in cx88_risc_field()
97 offset -= sg_dma_len(sg); in cx88_risc_field()
98 sg = sg_next(sg); in cx88_risc_field()
104 if (bpl <= sg_dma_len(sg)-offset) { in cx88_risc_field()
107 *(rp++)=cpu_to_le32(sg_dma_address(sg)+offset); in cx88_risc_field()
113 (sg_dma_len(sg)-offset)); in cx88_risc_field()
114 *(rp++)=cpu_to_le32(sg_dma_address(sg)+offset); in cx88_risc_field()
115 todo -= (sg_dma_len(sg)-offset); in cx88_risc_field()
[all …]
/linux-4.4.14/tools/virtio/
Dvringh_test.c326 struct scatterlist sg[4]; in parallel_test() local
351 sg_init_table(sg, num_sg = 3); in parallel_test()
352 sg_set_buf(&sg[0], (void *)dbuf, 1); in parallel_test()
353 sg_set_buf(&sg[1], (void *)dbuf + 1, 2); in parallel_test()
354 sg_set_buf(&sg[2], (void *)dbuf + 3, 1); in parallel_test()
357 sg_init_table(sg, num_sg = 2); in parallel_test()
358 sg_set_buf(&sg[0], (void *)dbuf, 1); in parallel_test()
359 sg_set_buf(&sg[1], (void *)dbuf + 1, 3); in parallel_test()
362 sg_init_table(sg, num_sg = 1); in parallel_test()
363 sg_set_buf(&sg[0], (void *)dbuf, 4); in parallel_test()
[all …]
/linux-4.4.14/drivers/block/
Dcpqarray.c945 c->req.sg[i].size = tmp_sg[i].length; in do_ida_request()
946 c->req.sg[i].addr = (__u32) pci_map_page(h->pci_dev, in do_ida_request()
1031 pci_unmap_page(hba[cmd->ctlr]->pci_dev, cmd->req.sg[i].addr, in complete_command()
1032 cmd->req.sg[i].size, ddir); in complete_command()
1257 p = memdup_user(io->sg[0].addr, io->sg[0].size); in ida_ctlr_ioctl()
1266 c->req.sg[0].size = io->sg[0].size; in ida_ctlr_ioctl()
1267 c->req.sg[0].addr = pci_map_single(h->pci_dev, p, in ida_ctlr_ioctl()
1268 c->req.sg[0].size, PCI_DMA_BIDIRECTIONAL); in ida_ctlr_ioctl()
1274 p = kmalloc(io->sg[0].size, GFP_KERNEL); in ida_ctlr_ioctl()
1282 c->req.sg[0].size = io->sg[0].size; in ida_ctlr_ioctl()
[all …]
Dxen-blkfront.c80 struct scatterlist *sg; member
588 struct scatterlist *sg; in blkif_queue_rw_req() local
625 num_sg = blk_rq_map_sg(req->q, req, info->shadow[id].sg); in blkif_queue_rw_req()
628 for_each_sg(info->shadow[id].sg, sg, num_sg, i) in blkif_queue_rw_req()
629 num_grant += gnttab_count_grant(sg->offset, sg->length); in blkif_queue_rw_req()
677 for_each_sg(info->shadow[id].sg, sg, num_sg, i) { in blkif_queue_rw_req()
678 BUG_ON(sg->offset + sg->length > PAGE_SIZE); in blkif_queue_rw_req()
681 setup.bvec_off = sg->offset; in blkif_queue_rw_req()
682 setup.bvec_data = kmap_atomic(sg_page(sg)); in blkif_queue_rw_req()
685 gnttab_foreach_grant_in_range(sg_page(sg), in blkif_queue_rw_req()
[all …]
/linux-4.4.14/drivers/net/irda/
Dsa1100_ir.c50 struct scatterlist sg; member
115 return sg_dma_len(&buf->sg) - state.residue; in sa1100_irda_dma_xferred()
150 desc = dmaengine_prep_slave_sg(chan, &buf->sg, 1, dir, in sa1100_irda_dma_start()
180 sg_set_buf(&si->dma_rx.sg, si->dma_rx.skb->data, HPSIR_MAX_RXLEN); in sa1100_irda_rx_alloc()
181 if (dma_map_sg(si->dma_rx.dev, &si->dma_rx.sg, 1, DMA_FROM_DEVICE) == 0) { in sa1100_irda_rx_alloc()
230 dma_unmap_sg(si->dma_tx.dev, &si->dma_tx.sg, 1, DMA_TO_DEVICE); in sa1100_irda_sirtxdma_irq()
235 dev->stats.tx_bytes += sg_dma_len(&si->dma_tx.sg); in sa1100_irda_sirtxdma_irq()
263 sg_set_buf(&si->dma_tx.sg, si->tx_buff.data, si->tx_buff.len); in sa1100_irda_sir_tx_start()
264 if (dma_map_sg(si->dma_tx.dev, &si->dma_tx.sg, 1, DMA_TO_DEVICE) == 0) { in sa1100_irda_sir_tx_start()
378 dma_unmap_sg(si->dma_tx.dev, &si->dma_tx.sg, 1, in sa1100_irda_firtxdma_irq()
[all …]
/linux-4.4.14/lib/mpi/
Dmpicoder.c439 struct scatterlist *sg; in mpi_read_raw_from_sgl() local
448 for_each_sg(sgl, sg, ents, i) { in mpi_read_raw_from_sgl()
449 const u8 *buff = sg_virt(sg); in mpi_read_raw_from_sgl()
450 int len = sg->length; in mpi_read_raw_from_sgl()
465 sgl = sg; in mpi_read_raw_from_sgl()
501 for_each_sg(sgl, sg, ents, i) { in mpi_read_raw_from_sgl()
502 const u8 *buffer = sg_virt(sg) + lzeros; in mpi_read_raw_from_sgl()
503 int len = sg->length - lzeros; in mpi_read_raw_from_sgl()
506 if (sg_is_last(sg) && (len % BYTES_PER_MPI_LIMB)) in mpi_read_raw_from_sgl()
/linux-4.4.14/arch/frv/include/asm/
Ddma-mapping.h34 extern int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
38 void dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries, in dma_unmap_sg() argument
85 void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nelems, in dma_sync_sg_for_cpu() argument
91 void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, int nelems, in dma_sync_sg_for_device() argument
/linux-4.4.14/arch/mips/loongson64/common/
Ddma-swiotlb.c60 static int loongson_dma_map_sg(struct device *dev, struct scatterlist *sg, in loongson_dma_map_sg() argument
64 int r = swiotlb_map_sg_attrs(dev, sg, nents, dir, NULL); in loongson_dma_map_sg()
79 struct scatterlist *sg, int nents, in loongson_dma_sync_sg_for_device() argument
82 swiotlb_sync_sg_for_device(dev, sg, nents, dir); in loongson_dma_sync_sg_for_device()
/linux-4.4.14/sound/soc/sh/
Dsiu_pcm.c124 struct scatterlist sg; in siu_pcm_wr_set() local
127 sg_init_table(&sg, 1); in siu_pcm_wr_set()
128 sg_set_page(&sg, pfn_to_page(PFN_DOWN(buff)), in siu_pcm_wr_set()
130 sg_dma_len(&sg) = size; in siu_pcm_wr_set()
131 sg_dma_address(&sg) = buff; in siu_pcm_wr_set()
134 &sg, 1, DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK); in siu_pcm_wr_set()
172 struct scatterlist sg; in siu_pcm_rd_set() local
177 sg_init_table(&sg, 1); in siu_pcm_rd_set()
178 sg_set_page(&sg, pfn_to_page(PFN_DOWN(buff)), in siu_pcm_rd_set()
180 sg_dma_len(&sg) = size; in siu_pcm_rd_set()
[all …]
/linux-4.4.14/net/ipv4/
Dah4.c154 struct scatterlist *sg; in ah_output() local
185 sg = ah_req_sg(ahash, req); in ah_output()
186 seqhisg = sg + nfrags; in ah_output()
222 sg_init_table(sg, nfrags + sglists); in ah_output()
223 skb_to_sgvec_nomark(skb, sg, 0, skb->len); in ah_output()
230 ahash_request_set_crypt(req, sg, icv, skb->len + seqhi_len); in ah_output()
307 struct scatterlist *sg; in ah_input() local
372 sg = ah_req_sg(ahash, req); in ah_input()
373 seqhisg = sg + nfrags; in ah_input()
392 sg_init_table(sg, nfrags + sglists); in ah_input()
[all …]
Desp4.c127 struct scatterlist *sg; in esp_output() local
185 sg = esp_req_sg(aead, req); in esp_output()
258 sg_init_table(sg, nfrags); in esp_output()
259 skb_to_sgvec(skb, sg, in esp_output()
263 aead_request_set_crypt(req, sg, sg, ivlen + clen, iv); in esp_output()
419 struct scatterlist *sg; in esp_input() local
451 sg = esp_req_sg(aead, req); in esp_input()
471 sg_init_table(sg, nfrags); in esp_input()
472 skb_to_sgvec(skb, sg, 0, skb->len); in esp_input()
474 aead_request_set_crypt(req, sg, sg, elen + ivlen, iv); in esp_input()
/linux-4.4.14/Documentation/DocBook/
Dusb.xml.db32 API-struct-usb-sg-request
57 API-usb-sg-init
58 API-usb-sg-wait
59 API-usb-sg-cancel
102 API-usb-buffer-map-sg
103 API-usb-buffer-dmasync-sg
104 API-usb-buffer-unmap-sg
/linux-4.4.14/drivers/media/pci/bt8xx/
Dbttv-risc.c54 struct scatterlist *sg; in bttv_risc_packed() local
80 sg = sglist; in bttv_risc_packed()
85 while (offset && offset >= sg_dma_len(sg)) { in bttv_risc_packed()
86 offset -= sg_dma_len(sg); in bttv_risc_packed()
87 sg = sg_next(sg); in bttv_risc_packed()
89 if (bpl <= sg_dma_len(sg)-offset) { in bttv_risc_packed()
93 *(rp++)=cpu_to_le32(sg_dma_address(sg)+offset); in bttv_risc_packed()
99 (sg_dma_len(sg)-offset)); in bttv_risc_packed()
100 *(rp++)=cpu_to_le32(sg_dma_address(sg)+offset); in bttv_risc_packed()
101 todo -= (sg_dma_len(sg)-offset); in bttv_risc_packed()
[all …]
/linux-4.4.14/include/scsi/
Dscsi_cmnd.h162 extern void *scsi_kmap_atomic_sg(struct scatterlist *sg, int sg_count,
196 #define scsi_for_each_sg(cmd, sg, nseg, __i) \ argument
197 for_each_sg(scsi_sglist(cmd), sg, nseg, __i)
323 #define scsi_for_each_prot_sg(cmd, sg, nseg, __i) \ argument
324 for_each_sg(scsi_prot_sglist(cmd), sg, nseg, __i)
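
scsi_for_each_sg() is the low-level driver's view of a command's data buffer; paired with scsi_dma_map(), the queuecommand-side loop typically reads as below (fill_hw_segment() is an invented hook):

    #include <scsi/scsi_cmnd.h>

    /* Hypothetical hook: describe one segment to the HBA. */
    extern void fill_hw_segment(int idx, dma_addr_t addr, unsigned int len);

    static int mylld_build_segments(struct scsi_cmnd *cmd)
    {
            struct scatterlist *sg;
            int i, nseg;

            nseg = scsi_dma_map(cmd);       /* maps scsi_sglist(cmd) */
            if (nseg < 0)
                    return nseg;

            scsi_for_each_sg(cmd, sg, nseg, i)
                    fill_hw_segment(i, sg_dma_address(sg), sg_dma_len(sg));

            return 0;
    }
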
/linux-4.4.14/arch/arm/kernel/
Ddma.c124 void set_dma_sg (unsigned int chan, struct scatterlist *sg, int nr_sg) in set_dma_sg() argument
131 dma->sg = sg; in set_dma_sg()
148 dma->sg = NULL; in __set_dma_addr()
165 dma->sg = NULL; in set_dma_count()
/linux-4.4.14/arch/arm/mm/
Ddma-mapping.c911 int arm_dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in arm_dma_map_sg() argument
918 for_each_sg(sg, s, nents, i) { in arm_dma_map_sg()
930 for_each_sg(sg, s, i, j) in arm_dma_map_sg()
945 void arm_dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nents, in arm_dma_unmap_sg() argument
953 for_each_sg(sg, s, nents, i) in arm_dma_unmap_sg()
964 void arm_dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, in arm_dma_sync_sg_for_cpu() argument
971 for_each_sg(sg, s, nents, i) in arm_dma_sync_sg_for_cpu()
983 void arm_dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, in arm_dma_sync_sg_for_device() argument
990 for_each_sg(sg, s, nents, i) in arm_dma_sync_sg_for_device()
1504 static int __map_sg_chunk(struct device *dev, struct scatterlist *sg, in __map_sg_chunk() argument
[all …]
/linux-4.4.14/drivers/ide/
Dau1xxx-ide.c219 struct scatterlist *sg; in auide_build_dmatable() local
227 sg = hwif->sg_table; in auide_build_dmatable()
228 while (i && sg_dma_len(sg)) { in auide_build_dmatable()
232 cur_addr = sg_dma_address(sg); in auide_build_dmatable()
233 cur_len = sg_dma_len(sg); in auide_build_dmatable()
253 sg_phys(sg), tc, flags)) { in auide_build_dmatable()
259 sg_phys(sg), tc, flags)) { in auide_build_dmatable()
268 sg = sg_next(sg); in auide_build_dmatable()
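
Editor's note: auide_build_dmatable() may have to split one sg entry across several DBDMA descriptors, which is why it walks cur_addr/cur_len inside each entry. A sketch of that splitting under an assumed per-descriptor size limit (HW_MAX_SEG and emit() are illustrative stand-ins for the controller-specific parts):

#include <linux/kernel.h>
#include <linux/scatterlist.h>

#define HW_MAX_SEG 0x10000      /* illustrative per-descriptor limit */

/* Split mapped sg entries into hardware descriptors carrying at most
 * HW_MAX_SEG bytes each; returns the number of descriptors emitted.
 */
static int build_hw_table(struct scatterlist *sg, int nents,
                          void (*emit)(dma_addr_t addr, u32 len))
{
    int count = 0;

    for (; nents && sg; nents--, sg = sg_next(sg)) {
        dma_addr_t cur_addr = sg_dma_address(sg);
        u32 cur_len = sg_dma_len(sg);

        while (cur_len) {
            u32 tc = min_t(u32, cur_len, HW_MAX_SEG);

            emit(cur_addr, tc);
            cur_addr += tc;
            cur_len -= tc;
            count++;
        }
    }
    return count;
}
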
/linux-4.4.14/net/ipv6/
Desp6.c155 struct scatterlist *sg; in esp6_output() local
212 sg = esp_req_sg(aead, req); in esp6_output()
250 sg_init_table(sg, nfrags); in esp6_output()
251 skb_to_sgvec(skb, sg, in esp6_output()
255 aead_request_set_crypt(req, sg, sg, ivlen + clen, iv); in esp6_output()
369 struct scatterlist *sg; in esp6_input() local
405 sg = esp_req_sg(aead, req); in esp6_input()
425 sg_init_table(sg, nfrags); in esp6_input()
426 skb_to_sgvec(skb, sg, 0, skb->len); in esp6_input()
428 aead_request_set_crypt(req, sg, sg, elen + ivlen, iv); in esp6_input()
Dah6.c343 struct scatterlist *sg; in ah6_output() local
380 sg = ah_req_sg(ahash, req); in ah6_output()
381 seqhisg = sg + nfrags; in ah6_output()
425 sg_init_table(sg, nfrags + sglists); in ah6_output()
426 skb_to_sgvec_nomark(skb, sg, 0, skb->len); in ah6_output()
433 ahash_request_set_crypt(req, sg, icv, skb->len + seqhi_len); in ah6_output()
524 struct scatterlist *sg; in ah6_input() local
589 sg = ah_req_sg(ahash, req); in ah6_input()
590 seqhisg = sg + nfrags; in ah6_input()
605 sg_init_table(sg, nfrags + sglists); in ah6_input()
[all …]
/linux-4.4.14/arch/cris/include/asm/
Ddma-mapping.h56 dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg() argument
80 dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries, in dma_unmap_sg() argument
113 dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nelems, in dma_sync_sg_for_cpu() argument
119 dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, int nelems, in dma_sync_sg_for_device() argument
/linux-4.4.14/arch/nios2/include/asm/
Ddma-mapping.h78 extern int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
84 extern void dma_unmap_sg(struct device *dev, struct scatterlist *sg,
96 extern void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
98 extern void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg,
/linux-4.4.14/arch/powerpc/platforms/512x/
Dmpc512x_lpbfifo.c167 struct scatterlist sg; in mpc512x_lpbfifo_kick() local
249 sg_init_table(&sg, 1); in mpc512x_lpbfifo_kick()
251 sg_dma_address(&sg) = dma_map_single(dma_dev->dev, in mpc512x_lpbfifo_kick()
253 if (dma_mapping_error(dma_dev->dev, sg_dma_address(&sg))) in mpc512x_lpbfifo_kick()
256 lpbfifo.ram_bus_addr = sg_dma_address(&sg); /* For freeing later */ in mpc512x_lpbfifo_kick()
258 sg_dma_len(&sg) = lpbfifo.req->size; in mpc512x_lpbfifo_kick()
260 dma_tx = dmaengine_prep_slave_sg(lpbfifo.chan, &sg, in mpc512x_lpbfifo_kick()
328 dma_unmap_single(dma_dev->dev, sg_dma_address(&sg), in mpc512x_lpbfifo_kick()
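
Editor's note: mpc512x_lpbfifo_kick() builds a one-entry scatterlist around a dma_map_single() mapping because dmaengine_prep_slave_sg() takes only scatterlists, even for a single buffer. That wrapper pattern, sketched (the helper name and prep flags are assumptions):

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Wrap one mapped buffer in a single-entry scatterlist for
 * dmaengine_prep_slave_sg().
 */
static struct dma_async_tx_descriptor *
prep_one_buf(struct dma_chan *chan, void *buf, size_t size,
             enum dma_transfer_direction dir)
{
    struct device *dev = chan->device->dev;
    struct scatterlist sg;
    dma_addr_t addr;

    addr = dma_map_single(dev, buf, size,
                          dir == DMA_MEM_TO_DEV ? DMA_TO_DEVICE
                                                : DMA_FROM_DEVICE);
    if (dma_mapping_error(dev, addr))
        return NULL;

    sg_init_table(&sg, 1);
    sg_dma_address(&sg) = addr;
    sg_dma_len(&sg) = size;

    /* caller must dma_unmap_single() once the transfer completes */
    return dmaengine_prep_slave_sg(chan, &sg, 1, dir,
                                   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
}
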
/linux-4.4.14/drivers/net/ppp/
Dppp_mppe.c68 setup_sg(struct scatterlist *sg, const void *address, unsigned int length) in setup_sg() argument
70 sg_set_buf(sg, address, length); in setup_sg()
139 struct scatterlist sg[4]; in get_new_key_from_sha() local
142 sg_init_table(sg, 4); in get_new_key_from_sha()
144 nbytes = setup_sg(&sg[0], state->master_key, state->keylen); in get_new_key_from_sha()
145 nbytes += setup_sg(&sg[1], sha_pad->sha_pad1, in get_new_key_from_sha()
147 nbytes += setup_sg(&sg[2], state->session_key, state->keylen); in get_new_key_from_sha()
148 nbytes += setup_sg(&sg[3], sha_pad->sha_pad2, in get_new_key_from_sha()
154 crypto_hash_digest(&desc, sg, nbytes, state->sha1_digest); in get_new_key_from_sha()
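
Editor's note: get_new_key_from_sha() digests four discontiguous buffers in a single pass by giving each its own sg entry and summing the lengths. The same trick with two buffers, using the same legacy synchronous crypto_hash API this 4.4-era file uses (the helper name is ours):

#include <linux/crypto.h>       /* legacy crypto_hash API, as in 4.4 */
#include <linux/scatterlist.h>

/* Digest two discontiguous buffers in one call by giving each its own
 * scatterlist entry, the way ppp_mppe builds its 4-entry table.
 */
static int sha1_two_bufs(struct crypto_hash *tfm,
                         const void *a, unsigned int alen,
                         const void *b, unsigned int blen, u8 *out)
{
    struct hash_desc desc = { .tfm = tfm, .flags = 0 };
    struct scatterlist sg[2];

    sg_init_table(sg, 2);
    sg_set_buf(&sg[0], a, alen);
    sg_set_buf(&sg[1], b, blen);

    return crypto_hash_digest(&desc, sg, alen + blen, out);
}
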
/linux-4.4.14/arch/openrisc/kernel/
Ddma.c179 or1k_map_sg(struct device *dev, struct scatterlist *sg, in or1k_map_sg() argument
186 for_each_sg(sg, s, nents, i) { in or1k_map_sg()
195 or1k_unmap_sg(struct device *dev, struct scatterlist *sg, in or1k_unmap_sg() argument
202 for_each_sg(sg, s, nents, i) { in or1k_unmap_sg()
/linux-4.4.14/drivers/misc/mic/scif/
Dscif_debugfs.c68 struct scatterlist *sg; in scif_display_window() local
91 for_each_sg(window->st->sgl, sg, window->st->nents, j) in scif_display_window()
93 j, sg_dma_address(sg), sg_dma_len(sg)); in scif_display_window()
/linux-4.4.14/drivers/scsi/qla2xxx/
Dqla_iocb.c199 struct scatterlist *sg; in qla2x00_build_scsi_iocbs_32() local
222 scsi_for_each_sg(cmd, sg, tot_dsds, i) { in qla2x00_build_scsi_iocbs_32()
236 *cur_dsd++ = cpu_to_le32(sg_dma_address(sg)); in qla2x00_build_scsi_iocbs_32()
237 *cur_dsd++ = cpu_to_le32(sg_dma_len(sg)); in qla2x00_build_scsi_iocbs_32()
257 struct scatterlist *sg; in qla2x00_build_scsi_iocbs_64() local
279 scsi_for_each_sg(cmd, sg, tot_dsds, i) { in qla2x00_build_scsi_iocbs_64()
294 sle_dma = sg_dma_address(sg); in qla2x00_build_scsi_iocbs_64()
297 *cur_dsd++ = cpu_to_le32(sg_dma_len(sg)); in qla2x00_build_scsi_iocbs_64()
705 struct scatterlist *sg; in qla24xx_build_scsi_iocbs() local
738 scsi_for_each_sg(cmd, sg, tot_dsds, i) { in qla24xx_build_scsi_iocbs()
[all …]
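
Editor's note: the qla2xxx builders split each sg_dma_address() into two little-endian 32-bit words. Sketched with the generic upper_32_bits()/lower_32_bits() helpers (the three-word { addr_lo, addr_hi, len } layout follows qla2x00_build_scsi_iocbs_64()):

#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include <scsi/scsi_cmnd.h>

/* Fill a 64-bit address/length descriptor array the way the qla2xxx
 * IOCB builders fill their DSDs.
 */
static void fill_dsds(struct scsi_cmnd *cmd, int tot_dsds, __le32 *cur_dsd)
{
    struct scatterlist *sg;
    int i;

    scsi_for_each_sg(cmd, sg, tot_dsds, i) {
        dma_addr_t sle_dma = sg_dma_address(sg);

        *cur_dsd++ = cpu_to_le32(lower_32_bits(sle_dma));
        *cur_dsd++ = cpu_to_le32(upper_32_bits(sle_dma));
        *cur_dsd++ = cpu_to_le32(sg_dma_len(sg));
    }
}
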
/linux-4.4.14/drivers/misc/mic/host/
Dmic_boot.c92 static int __mic_dma_map_sg(struct device *dev, struct scatterlist *sg, in __mic_dma_map_sg() argument
102 ret = dma_map_sg(&mdev->pdev->dev, sg, nents, dir); in __mic_dma_map_sg()
106 for_each_sg(sg, s, nents, i) { in __mic_dma_map_sg()
114 for_each_sg(sg, s, i, j) { in __mic_dma_map_sg()
118 dma_unmap_sg(&mdev->pdev->dev, sg, nents, dir); in __mic_dma_map_sg()
123 struct scatterlist *sg, int nents, in __mic_dma_unmap_sg() argument
133 for_each_sg(sg, s, nents, i) { in __mic_dma_unmap_sg()
138 dma_unmap_sg(&mdev->pdev->dev, sg, nents, dir); in __mic_dma_unmap_sg()
/linux-4.4.14/drivers/tty/serial/
Damba-pl011.c133 struct scatterlist sg; member
155 struct scatterlist sg; member
252 static int pl011_sgbuf_init(struct dma_chan *chan, struct pl011_sgbuf *sg, in pl011_sgbuf_init() argument
257 sg->buf = dma_alloc_coherent(chan->device->dev, in pl011_sgbuf_init()
259 if (!sg->buf) in pl011_sgbuf_init()
262 sg_init_table(&sg->sg, 1); in pl011_sgbuf_init()
263 sg_set_page(&sg->sg, phys_to_page(dma_addr), in pl011_sgbuf_init()
265 sg_dma_address(&sg->sg) = dma_addr; in pl011_sgbuf_init()
266 sg_dma_len(&sg->sg) = PL011_DMA_BUFFER_SIZE; in pl011_sgbuf_init()
271 static void pl011_sgbuf_free(struct dma_chan *chan, struct pl011_sgbuf *sg, in pl011_sgbuf_free() argument
[all …]
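
Editor's note: pl011_sgbuf_init() wraps a coherent DMA buffer in a one-entry scatterlist, deriving the page with phys_to_page() and then overriding sg_dma_address() with the coherent handle. A sketch of that wrapper; note it inherits pl011's assumption that the dma handle doubles as a physical address, which holds on the platforms this driver targets but not in general:

#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/scatterlist.h>

static int sgbuf_init(struct device *dev, struct scatterlist *sg,
                      size_t len, void **buf)
{
    dma_addr_t dma_addr;

    *buf = dma_alloc_coherent(dev, len, &dma_addr, GFP_KERNEL);
    if (!*buf)
        return -ENOMEM;

    sg_init_table(sg, 1);
    /* assumes dma_addr is a valid physical address, as pl011 does */
    sg_set_page(sg, phys_to_page(dma_addr), len, offset_in_page(dma_addr));
    sg_dma_address(sg) = dma_addr;
    sg_dma_len(sg) = len;
    return 0;
}
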
Dpch_uart.c798 struct scatterlist *sg = priv->sg_tx_p; in pch_dma_tx_complete() local
801 for (i = 0; i < priv->nent; i++, sg++) { in pch_dma_tx_complete()
802 xmit->tail += sg_dma_len(sg); in pch_dma_tx_complete()
803 port->icount.tx += sg_dma_len(sg); in pch_dma_tx_complete()
807 dma_unmap_sg(port->dev, sg, priv->nent, DMA_TO_DEVICE); in pch_dma_tx_complete()
869 struct scatterlist *sg; in dma_handle_rx() local
872 sg = &priv->sg_rx; in dma_handle_rx()
876 sg_dma_len(sg) = priv->trigger_level; in dma_handle_rx()
879 sg_dma_len(sg), (unsigned long)priv->rx_buf_virt & in dma_handle_rx()
882 sg_dma_address(sg) = priv->rx_buf_dma; in dma_handle_rx()
[all …]
/linux-4.4.14/arch/blackfin/include/asm/
Ddma-mapping.h99 extern int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
103 dma_unmap_sg(struct device *dev, struct scatterlist *sg, in dma_unmap_sg() argument
140 dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nents, in dma_sync_sg_for_cpu() argument
147 dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg,
/linux-4.4.14/drivers/scsi/isci/
Drequest.c105 static void init_sgl_element(struct scu_sgl_element *e, struct scatterlist *sg) in init_sgl_element() argument
107 e->length = sg_dma_len(sg); in init_sgl_element()
108 e->address_upper = upper_32_bits(sg_dma_address(sg)); in init_sgl_element()
109 e->address_lower = lower_32_bits(sg_dma_address(sg)); in init_sgl_element()
117 struct scatterlist *sg = NULL; in sci_request_build_sgl() local
124 sg = task->scatter; in sci_request_build_sgl()
126 while (sg) { in sci_request_build_sgl()
128 init_sgl_element(&scu_sg->A, sg); in sci_request_build_sgl()
129 sg = sg_next(sg); in sci_request_build_sgl()
130 if (sg) { in sci_request_build_sgl()
[all …]
/linux-4.4.14/drivers/infiniband/hw/usnic/
Dusnic_uiom.c83 struct scatterlist *sg; in usnic_uiom_put_pages() local
88 for_each_sg(chunk->page_list, sg, chunk->nents, i) { in usnic_uiom_put_pages()
89 page = sg_page(sg); in usnic_uiom_put_pages()
90 pa = sg_phys(sg); in usnic_uiom_put_pages()
104 struct scatterlist *sg; in usnic_uiom_get_pages() local
170 for_each_sg(chunk->page_list, sg, chunk->nents, i) { in usnic_uiom_get_pages()
171 sg_set_page(sg, page_list[i + off], in usnic_uiom_get_pages()
173 pa = sg_phys(sg); in usnic_uiom_get_pages()
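
Editor's note: usnic_uiom_get_pages() fills each sg entry straight from a get_user_pages() page array with sg_set_page(), and the teardown path reads them back with sg_page()/sg_phys(). Loading an sg table from a pinned page array, sketched (one page per entry; the helper name is ours):

#include <linux/mm.h>
#include <linux/scatterlist.h>

/* Load a scatterlist from an array of pinned pages, one full page per
 * entry; sg_init_table() also marks the final entry as the list end.
 */
static void sg_fill_from_pages(struct scatterlist *sgl, struct page **pages,
                               int npages)
{
    struct scatterlist *sg;
    int i;

    sg_init_table(sgl, npages);
    for_each_sg(sgl, sg, npages, i)
        sg_set_page(sg, pages[i], PAGE_SIZE, 0);
}
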
