
Searched refs:vmap (Results 1 – 54 of 54) sorted by relevance

/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/bios/
vmap.c:32 u16 vmap = 0x0000; in nvbios_vmap_table() local
36 vmap = nv_ro16(bios, bit_P.offset + 0x20); in nvbios_vmap_table()
37 if (vmap) { in nvbios_vmap_table()
38 *ver = nv_ro08(bios, vmap + 0); in nvbios_vmap_table()
42 *hdr = nv_ro08(bios, vmap + 1); in nvbios_vmap_table()
43 *cnt = nv_ro08(bios, vmap + 3); in nvbios_vmap_table()
44 *len = nv_ro08(bios, vmap + 2); in nvbios_vmap_table()
45 return vmap; in nvbios_vmap_table()
60 u16 vmap = nvbios_vmap_table(bios, ver, hdr, cnt, len); in nvbios_vmap_parse() local
62 switch (!!vmap * *ver) { in nvbios_vmap_parse()
[all …]
Kbuild:31 nvkm-y += nvkm/subdev/bios/vmap.o
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/volt/
base.c:69 u16 vmap; in nvkm_volt_map() local
71 vmap = nvbios_vmap_entry_parse(bios, id, &ver, &len, &info); in nvkm_volt_map()
72 if (vmap) { in nvkm_volt_map()
/linux-4.1.27/arch/arm/mm/
fault-armv.c:246 p1 = vmap(&page, 1, VM_IOREMAP, prot); in check_writebuffer_bugs()
247 p2 = vmap(&page, 1, VM_IOREMAP, prot); in check_writebuffer_bugs()
/linux-4.1.27/arch/hexagon/kernel/
vdso.c:41 vdso = vmap(&vdso_page, 1, 0, PAGE_KERNEL); in vdso_init()
/linux-4.1.27/drivers/gpu/drm/vmwgfx/
vmwgfx_prime.c:116 .vmap = vmw_prime_dmabuf_vmap,
/linux-4.1.27/arch/mips/kernel/
vdso.c:48 vdso = vmap(&vdso_page, 1, 0, PAGE_KERNEL); in init_vdso()
/linux-4.1.27/drivers/gpu/drm/i915/
i915_gem_dmabuf.c:142 obj->dma_buf_vmapping = vmap(pages, i, 0, PAGE_KERNEL); in i915_gem_dmabuf_vmap()
224 .vmap = i915_gem_dmabuf_vmap,
i915_cmd_parser.c:845 addr = vmap(pages, i, 0, PAGE_KERNEL); in vmap_batch()
/linux-4.1.27/Documentation/
cachetlb.txt:383 vmap/vmalloc API. Since kernel I/O goes via physical pages, the I/O
385 the only aliases. This isn't true for vmap aliases, so anything in
386 the kernel trying to do I/O to vmap areas must manually manage
387 coherency. It must do this by flushing the vmap range before doing
392 the vmap area. This is to make sure that any data the kernel
393 modified in the vmap range is made visible to the physical
399 the cache for a given virtual address range in the vmap area
403 vmap area.
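The cachetlb.txt lines above describe the flush-before / invalidate-after rule for doing I/O on pages that also have a vmap alias. Below is a minimal sketch of that pattern, assuming the flush_kernel_vmap_range()/invalidate_kernel_vmap_range() helpers that section of cachetlb.txt documents; map_for_io(), pages and npages are hypothetical names, and the actual I/O submission is elided:

    #include <linux/highmem.h>
    #include <linux/mm.h>
    #include <linux/vmalloc.h>

    /* Hypothetical driver path: alias @pages with vmap(), let a device do
     * I/O on the underlying physical pages, then read the result back
     * through the alias. */
    static void *map_for_io(struct page **pages, unsigned int npages)
    {
        void *vaddr = vmap(pages, npages, VM_MAP, PAGE_KERNEL);

        if (!vaddr)
            return NULL;

        /* CPU stores made through the vmap alias must reach memory
         * before the device reads the pages. */
        flush_kernel_vmap_range(vaddr, npages * PAGE_SIZE);

        /* ... submit the I/O on the physical pages and wait for it ... */

        /* Discard stale alias cache lines before the CPU reads what the
         * device just wrote. */
        invalidate_kernel_vmap_range(vaddr, npages * PAGE_SIZE);

        return vaddr;   /* caller vunmap()s when finished */
    }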
dma-buf-sharing.txt:308 For some cases the overhead of kmap can be too high, a vmap interface
316 The vmap call can fail if there is no vmap support in the exporter, or if it
318 the dma-buf layer keeps a reference count for all vmap access and calls down
319 into the exporter's vmap function only when no vmapping exists, and only
320 unmaps it once. Protection against concurrent vmap/vunmap calls is provided
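The dma-buf-sharing.txt excerpt is describing the kernel vmap access pair dma_buf_vmap()/dma_buf_vunmap() (the ->vmap hook it calls into is matched in dma-buf.h below). A minimal importer-side sketch, assuming a hypothetical read_first_byte() helper and keeping error handling to the NULL check:

    #include <linux/dma-buf.h>
    #include <linux/errno.h>
    #include <linux/types.h>

    /* Hypothetical importer: map the whole buffer into kernel space,
     * peek at the first byte, and drop the mapping again.  The dma-buf
     * core refcounts these calls and only invokes the exporter's
     * ->vmap()/->vunmap() for the first map and the last unmap. */
    static int read_first_byte(struct dma_buf *dmabuf, u8 *out)
    {
        void *vaddr = dma_buf_vmap(dmabuf);

        if (!vaddr)             /* exporter may not support vmap at all */
            return -ENOMEM;

        *out = *(u8 *)vaddr;
        dma_buf_vunmap(dmabuf, vaddr);
        return 0;
    }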
/linux-4.1.27/include/linux/
dma-buf.h:107 void *(*vmap)(struct dma_buf *); member
vmalloc.h:85 extern void *vmap(struct page **pages, unsigned int count,
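The .vmap member at dma-buf.h:107 is the exporter hook that the various ".vmap = ..." initializers in this listing populate, and vmap() from vmalloc.h:85 is the primitive most of them use to implement it. A hedged exporter-side sketch; struct my_buf, my_dmabuf_vmap() and my_dmabuf_ops are invented names, and all other dma_buf_ops callbacks are elided:

    #include <linux/dma-buf.h>
    #include <linux/mm.h>
    #include <linux/vmalloc.h>

    /* Hypothetical exporter state: an array of backing pages plus a
     * cached kernel mapping, as in several drivers above. */
    struct my_buf {
        struct page **pages;
        unsigned int npages;
        void *vaddr;
    };

    /* Minimal ->vmap callback: map the backing pages contiguously on
     * first use and return the cached address on later calls. */
    static void *my_dmabuf_vmap(struct dma_buf *dmabuf)
    {
        struct my_buf *buf = dmabuf->priv;

        if (!buf->vaddr)
            buf->vaddr = vmap(buf->pages, buf->npages, 0, PAGE_KERNEL);
        return buf->vaddr;
    }

    static const struct dma_buf_ops my_dmabuf_ops = {
        /* attach/detach/map_dma_buf/kmap/mmap/release etc. elided */
        .vmap = my_dmabuf_vmap,
    };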
/linux-4.1.27/arch/x86/xen/
grant-table.c:147 vaddr = vmap(pages, nr_grant_frames, 0, PAGE_KERNEL); in xlated_setup_gnttab_pages()
/linux-4.1.27/drivers/net/ethernet/mellanox/mlx4/
en_resources.c:99 buf->direct.buf = vmap(pages, buf->nbufs, VM_MAP, PAGE_KERNEL); in mlx4_en_map_buffer()
alloc.c:640 buf->direct.buf = vmap(pages, buf->nbufs, VM_MAP, PAGE_KERNEL); in mlx4_buf_alloc()
/linux-4.1.27/drivers/gpu/drm/
drm_memory.c:90 addr = vmap(page_map, num_pages, VM_IOREMAP, PAGE_AGP); in agp_remap()
drm_prime.c:302 .vmap = drm_gem_dmabuf_vmap,
/linux-4.1.27/arch/powerpc/platforms/cell/spufs/
lscsa_alloc.c:119 csa->lscsa = vmap(pgarray, n_4k, VM_USERMAP, PAGE_KERNEL); in spu_alloc_lscsa()
/linux-4.1.27/drivers/gpu/drm/ttm/
ttm_bo_util.c:266 dst = vmap(&d, 1, 0, prot); in ttm_copy_io_ttm_page()
302 src = vmap(&s, 1, 0, prot); in ttm_copy_ttm_io_page()
561 map->virtual = vmap(ttm->pages + start_page, num_pages, in ttm_bo_kmap_ttm()
/linux-4.1.27/sound/core/
sgbuf.c:128 dmab->area = vmap(sgbuf->page_table, sgbuf->pages, VM_MAP, PAGE_KERNEL); in snd_malloc_sgbuf_pages()
/linux-4.1.27/arch/m68k/kernel/
dma.c:58 addr = vmap(map, size, VM_MAP, pgprot); in dma_alloc_coherent()
/linux-4.1.27/drivers/gpu/drm/udl/
udl_gem.c:174 obj->vmapping = vmap(obj->pages, page_count, 0, PAGE_KERNEL); in udl_gem_vmap()
/linux-4.1.27/arch/arm/lib/
uaccess_with_memcpy.c:231 user_ptr = vmap(&dst_page, 1, VM_IOREMAP, __pgprot(__P010));
/linux-4.1.27/drivers/net/ethernet/mellanox/mlx5/core/
alloc.c:100 buf->direct.buf = vmap(pages, buf->nbufs, VM_MAP, PAGE_KERNEL); in mlx5_buf_alloc()
/linux-4.1.27/arch/sh/kernel/
io_trapped.c:76 tiop->virt_base = vmap(pages, n, VM_MAP, PAGE_NONE); in register_trapped_io()
/linux-4.1.27/drivers/staging/android/ion/
ion_test.c:68 void *vaddr = vmap(&page, 1, VM_MAP, pgprot); in ion_handle_test_dma()
ion_heap.c:56 vaddr = vmap(pages, npages, VM_MAP, pgprot); in ion_heap_map_kernel()
/linux-4.1.27/drivers/dma-buf/
dma-buf.c:718 if (!dmabuf->ops->vmap) in dma_buf_vmap()
731 ptr = dmabuf->ops->vmap(dmabuf); in dma_buf_vmap()
/linux-4.1.27/drivers/gpu/drm/exynos/
exynos_drm_fbdev.c:100 buffer->kvaddr = (void __iomem *) vmap(buffer->pages, in exynos_drm_fbdev_update()
/linux-4.1.27/drivers/media/v4l2-core/
videobuf2-vmalloc.c:362 .vmap = vb2_vmalloc_dmabuf_ops_vmap,
videobuf-dma-sg.c:237 dma->vaddr = vmap(dma->vaddr_pages, nr_pages, VM_MAP | VM_IOREMAP, in videobuf_dma_init_kernel()
videobuf2-dma-sg.c:577 .vmap = vb2_dma_sg_dmabuf_ops_vmap,
videobuf2-dma-contig.c:374 .vmap = vb2_dc_dmabuf_ops_vmap,
/linux-4.1.27/drivers/gpu/drm/tegra/
fb.c:264 bo->vaddr = vmap(bo->pages, bo->num_pages, VM_MAP, in tegra_fbdev_probe()
gem.c:622 .vmap = tegra_gem_prime_vmap,
/linux-4.1.27/drivers/staging/comedi/
comedi_buf.c:145 async->prealloc_buf = vmap(pages, n_pages, VM_MAP, in __comedi_buf_alloc()
/linux-4.1.27/Documentation/vm/
highmem.txt:67 (*) vmap(). This can be used to make a long duration mapping of multiple
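highmem.txt's point here is that vmap() builds one long-lived, virtually contiguous kernel mapping over an arbitrary set of (possibly highmem) pages, unlike the short-lived kmap()/kmap_atomic() mappings. A minimal sketch of that use; map_two_pages() is a hypothetical helper, and the caller is expected to vunmap() and free the pages later:

    #include <linux/gfp.h>
    #include <linux/mm.h>
    #include <linux/vmalloc.h>

    /* Hypothetical example: give two (possibly highmem) pages a single
     * contiguous kernel virtual address that stays valid until vunmap(). */
    static void *map_two_pages(struct page *pages[2])
    {
        void *vaddr;

        pages[0] = alloc_page(GFP_KERNEL | __GFP_HIGHMEM);
        pages[1] = alloc_page(GFP_KERNEL | __GFP_HIGHMEM);
        if (!pages[0] || !pages[1])
            goto err_free;

        vaddr = vmap(pages, 2, VM_MAP, PAGE_KERNEL);
        if (!vaddr)
            goto err_free;

        return vaddr;   /* undo with vunmap(vaddr) plus __free_page() per page */

    err_free:
        if (pages[0])
            __free_page(pages[0]);
        if (pages[1])
            __free_page(pages[1]);
        return NULL;
    }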
/linux-4.1.27/fs/pstore/
ram_core.c:412 vaddr = vmap(pages, page_count, VM_MAP, prot); in persistent_ram_vmap()
/linux-4.1.27/drivers/char/agp/
uninorth-agp.c:419 bridge->gatt_table = vmap(pages, (1 << page_order), 0, PAGE_KERNEL_NCG); in uninorth_create_gatt_table()
/linux-4.1.27/drivers/gpu/drm/msm/
msm_gem.c:396 msm_obj->vaddr = vmap(pages, obj->size >> PAGE_SHIFT, in msm_gem_vaddr_locked()
/linux-4.1.27/drivers/xen/xenbus/
xenbus_client.c:635 addr = vmap(node->hvm.pages, nr_grefs, VM_MAP | VM_IOREMAP, in xenbus_map_ring_valloc_hvm()
/linux-4.1.27/drivers/staging/lustre/lnet/klnds/socklnd/
socklnd_lib-linux.c:296 addr = vmap(pages, niov, VM_MAP, PAGE_KERNEL); in ksocknal_lib_kiov_vmap()
/linux-4.1.27/mm/
nommu.c:467 void *vmap(struct page **pages, unsigned int count, unsigned long flags, pgprot_t prot) in vmap() function
472 EXPORT_SYMBOL(vmap);
mmap.c:2912 struct vm_area_struct *copy_vma(struct vm_area_struct **vmap, in copy_vma() argument
2916 struct vm_area_struct *vma = *vmap; in copy_vma()
2955 *vmap = vma = new_vma; in copy_vma()
vmalloc.c:1551 void *vmap(struct page **pages, unsigned int count, in vmap() function
1573 EXPORT_SYMBOL(vmap);
/linux-4.1.27/arch/powerpc/kvm/
e500_mmu.c:796 virt = vmap(pages, num_pages, VM_MAP, PAGE_KERNEL); in kvm_vcpu_ioctl_config_tlb()
/linux-4.1.27/kernel/
relay.c:144 mem = vmap(buf->page_array, n_pages, VM_MAP, PAGE_KERNEL); in relay_alloc_buf()
/linux-4.1.27/drivers/gpu/drm/omapdrm/
omap_gem.c:942 omap_obj->vaddr = vmap(pages, obj->size >> PAGE_SHIFT, in omap_gem_vaddr()
/linux-4.1.27/drivers/base/
firmware_class.c:604 buf->data = vmap(buf->pages, buf->nr_pages, 0, PAGE_KERNEL_RO); in fw_map_pages_buf()
/linux-4.1.27/drivers/target/
target_core_transport.c:2258 cmd->t_data_vmap = vmap(pages, cmd->t_data_nents, VM_MAP, PAGE_KERNEL); in transport_kmap_data_sg()
/linux-4.1.27/drivers/misc/vmw_vmci/
vmci_queue_pair.c:848 produce_q->q_header = vmap(headers, 2, VM_MAP, PAGE_KERNEL); in qp_host_map_queues()
/linux-4.1.27/Documentation/filesystems/
proc.txt:938 vmap vmap()ed pages
/linux-4.1.27/drivers/firewire/
ohci.c:1015 ctx->buffer = vmap(pages, ARRAY_SIZE(pages), VM_MAP, PAGE_KERNEL); in ar_context_init()