ptes              361 arch/alpha/kernel/core_apecs.c 	*(vuip)APECS_IOC_TB2R = virt_to_phys(hose->sg_isa->ptes) >> 1;
ptes              464 arch/alpha/kernel/core_cia.c 	arena->ptes[4] = pte0;
ptes              488 arch/alpha/kernel/core_cia.c 	arena->ptes[5] = pte0;
ptes              524 arch/alpha/kernel/core_cia.c 	arena->ptes[4] = 0;
ptes              525 arch/alpha/kernel/core_cia.c 	arena->ptes[5] = 0;
ptes              737 arch/alpha/kernel/core_cia.c 	*(vip)CIA_IOC_PCI_T0_BASE = virt_to_phys(hose->sg_isa->ptes) >> 2;
ptes              286 arch/alpha/kernel/core_lca.c 	*(vulp)LCA_IOC_T_BASE0 = virt_to_phys(hose->sg_isa->ptes);
ptes              297 arch/alpha/kernel/core_marvel.c 	csrs->POx_TBASE[0].csr = virt_to_phys(hose->sg_isa->ptes);
ptes              315 arch/alpha/kernel/core_marvel.c 	csrs->POx_TBASE[2].csr = virt_to_phys(hose->sg_pci->ptes);
ptes              693 arch/alpha/kernel/core_marvel.c 	unsigned long *ptes;
ptes              748 arch/alpha/kernel/core_marvel.c 		ptes = hose->sg_pci->ptes;
ptes              752 arch/alpha/kernel/core_marvel.c 			pfn = ptes[baddr >> PAGE_SHIFT];
ptes             1051 arch/alpha/kernel/core_marvel.c 	pte = aper->arena->ptes[baddr >> PAGE_SHIFT];
ptes              378 arch/alpha/kernel/core_mcpcia.c 	*(vuip)MCPCIA_T0_BASE(mid) = virt_to_phys(hose->sg_isa->ptes) >> 8;
ptes              382 arch/alpha/kernel/core_mcpcia.c 	*(vuip)MCPCIA_T1_BASE(mid) = virt_to_phys(hose->sg_pci->ptes) >> 8;
ptes              361 arch/alpha/kernel/core_t2.c 	*(vulp)T2_TBASE2 = virt_to_phys(hose->sg_isa->ptes) >> 1;
ptes              329 arch/alpha/kernel/core_titan.c 	port->tba[0].csr  = virt_to_phys(hose->sg_isa->ptes);
ptes              337 arch/alpha/kernel/core_titan.c 	port->tba[2].csr  = virt_to_phys(hose->sg_pci->ptes);
ptes              464 arch/alpha/kernel/core_titan.c 	unsigned long *ptes;
ptes              519 arch/alpha/kernel/core_titan.c 		ptes = hose->sg_pci->ptes;
ptes              523 arch/alpha/kernel/core_titan.c 			pfn = ptes[baddr >> PAGE_SHIFT];
ptes              712 arch/alpha/kernel/core_titan.c 	pte = aper->arena->ptes[baddr >> PAGE_SHIFT];
ptes              337 arch/alpha/kernel/core_tsunami.c 	pchip->tba[0].csr  = virt_to_phys(hose->sg_isa->ptes);
ptes              341 arch/alpha/kernel/core_tsunami.c 	pchip->tba[1].csr  = virt_to_phys(hose->sg_pci->ptes);
ptes              123 arch/alpha/kernel/core_wildfire.c 	pci->pci_window[0].tbase.csr = virt_to_phys(hose->sg_isa->ptes);
ptes              135 arch/alpha/kernel/core_wildfire.c 	pci->pci_window[3].tbase.csr = virt_to_phys(hose->sg_pci->ptes);
ptes              139 arch/alpha/kernel/pci_impl.h 	unsigned long *ptes;
ptes               88 arch/alpha/kernel/pci_iommu.c 	arena->ptes = memblock_alloc_node(sizeof(*arena), align, nid);
ptes               89 arch/alpha/kernel/pci_iommu.c 	if (!NODE_DATA(nid) || !arena->ptes) {
ptes               93 arch/alpha/kernel/pci_iommu.c 		arena->ptes = memblock_alloc(mem_size, align);
ptes               94 arch/alpha/kernel/pci_iommu.c 		if (!arena->ptes)
ptes              105 arch/alpha/kernel/pci_iommu.c 	arena->ptes = memblock_alloc(mem_size, align);
ptes              106 arch/alpha/kernel/pci_iommu.c 	if (!arena->ptes)
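Note: in the node-local allocation above (pci_iommu.c:88) the size argument is sizeof(*arena), while both fallback paths (lines 93 and 105) request mem_size for the same PTE array. This looks like a copy-paste slip from the arena-struct allocation just before it; the corrected call would presumably read:

	arena->ptes = memblock_alloc_node(mem_size, align, nid);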
ptes              137 arch/alpha/kernel/pci_iommu.c 	unsigned long *ptes;
ptes              152 arch/alpha/kernel/pci_iommu.c 	ptes = arena->ptes;
ptes              164 arch/alpha/kernel/pci_iommu.c 		if (ptes[p+i])
ptes              196 arch/alpha/kernel/pci_iommu.c 	unsigned long *ptes;
ptes              202 arch/alpha/kernel/pci_iommu.c 	ptes = arena->ptes;
ptes              215 arch/alpha/kernel/pci_iommu.c 		ptes[p+i] = IOMMU_INVALID_PTE;
ptes              229 arch/alpha/kernel/pci_iommu.c 	p = arena->ptes + ofs;
ptes              327 arch/alpha/kernel/pci_iommu.c 		arena->ptes[i + dma_ofs] = mk_iommu_pte(paddr);
ptes              574 arch/alpha/kernel/pci_iommu.c 	unsigned long *ptes;
ptes              629 arch/alpha/kernel/pci_iommu.c 	ptes = &arena->ptes[dma_ofs];
ptes              648 arch/alpha/kernel/pci_iommu.c 			*ptes++ = mk_iommu_pte(paddr);
ptes              860 arch/alpha/kernel/pci_iommu.c 	unsigned long *ptes;
ptes              868 arch/alpha/kernel/pci_iommu.c 	ptes = arena->ptes;
ptes              879 arch/alpha/kernel/pci_iommu.c 		ptes[p+i] = IOMMU_RESERVED_PTE;
ptes              890 arch/alpha/kernel/pci_iommu.c 	unsigned long *ptes;
ptes              895 arch/alpha/kernel/pci_iommu.c 	ptes = arena->ptes;
ptes              899 arch/alpha/kernel/pci_iommu.c 		if (ptes[i] != IOMMU_RESERVED_PTE)
ptes              911 arch/alpha/kernel/pci_iommu.c 	unsigned long *ptes;
ptes              918 arch/alpha/kernel/pci_iommu.c 	ptes = arena->ptes;
ptes              921 arch/alpha/kernel/pci_iommu.c 		if (ptes[j] != IOMMU_RESERVED_PTE) {
ptes              928 arch/alpha/kernel/pci_iommu.c 		ptes[j] = mk_iommu_pte(page_to_phys(pages[i]));
ptes              943 arch/alpha/kernel/pci_iommu.c 	p = arena->ptes + pg_start;
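On Alpha, arena->ptes is a flat, physically contiguous array with one entry per page of a DMA window: each chipset programs its base into a translation-base register (shifted as the hardware requires, hence the >> 1 / >> 2 / >> 8 variants above), and mk_iommu_pte() converts a physical address into an entry value. A minimal user-space model of the first-fit scan the allocator performs over that array (names and the exact wrap-around policy are illustrative, not the kernel's):

	struct arena_model {
		unsigned long *ptes;	/* one slot per DMA page */
		unsigned long npages;
		unsigned long next_entry;
	};

	/* Find n consecutive free (zero) slots, first-fit from next_entry. */
	static long arena_find_free(struct arena_model *a, unsigned long n)
	{
		unsigned long i, p = a->next_entry;

		while (p + n <= a->npages) {
			for (i = 0; i < n && !a->ptes[p + i]; i++)
				;
			if (i == n)
				return (long)p;	/* found a run of n free slots */
			p += i + 1;		/* restart just past the busy slot */
		}
		return -1;
	}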
ptes              171 arch/powerpc/include/asm/plpar_wrappers.h 				    unsigned long *ptes)
ptes              179 arch/powerpc/include/asm/plpar_wrappers.h 	memcpy(ptes, retbuf, 8*sizeof(unsigned long));
ptes              189 arch/powerpc/include/asm/plpar_wrappers.h 					unsigned long *ptes)
ptes              197 arch/powerpc/include/asm/plpar_wrappers.h 	memcpy(ptes, retbuf, 8*sizeof(unsigned long));
ptes              345 arch/powerpc/include/asm/plpar_wrappers.h 				    unsigned long *ptes)
ptes              256 arch/powerpc/mm/hugetlbpage.c 	void *ptes[0];
ptes              268 arch/powerpc/mm/hugetlbpage.c 		kmem_cache_free(PGT_CACHE(PTE_T_ORDER), batch->ptes[i]);
ptes              291 arch/powerpc/mm/hugetlbpage.c 	(*batchp)->ptes[(*batchp)->index++] = hugepte;
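The void *ptes[0] at hugetlbpage.c:256 is a zero-length trailing array: the batch header and its entries share one allocation, entries are queued at (*batchp)->ptes[index++], and each is later freed back to the pgtable cache (line 268). In current kernel style the same layout would use a C99 flexible array member; a sketch of the surrounding struct, assuming the usual RCU-batched freelist shape:

	struct hugepd_freelist_model {
		struct rcu_head rcu;	/* freed after a grace period */
		unsigned int index;	/* number of queued page tables */
		void *ptes[];		/* flexible array member, one slot per pgtable */
	};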
ptes              242 arch/powerpc/mm/ptdump/hashpagetable.c 	struct hash_pte ptes[4];
ptes              260 arch/powerpc/mm/ptdump/hashpagetable.c 		lpar_rc = plpar_pte_read_4(0, hpte_group, (void *)ptes);
ptes              265 arch/powerpc/mm/ptdump/hashpagetable.c 			if (HPTE_V_COMPARE(ptes[j].v, want_v) &&
ptes              266 arch/powerpc/mm/ptdump/hashpagetable.c 					(ptes[j].v & HPTE_V_VALID)) {
ptes              268 arch/powerpc/mm/ptdump/hashpagetable.c 				*v = ptes[j].v;
ptes              269 arch/powerpc/mm/ptdump/hashpagetable.c 				*r = ptes[j].r;
ptes              802 arch/powerpc/platforms/pseries/lpar.c 	} ptes[4];
ptes              811 arch/powerpc/platforms/pseries/lpar.c 		lpar_rc = plpar_pte_read_4_raw(0, i, (void *)ptes);
ptes              818 arch/powerpc/platforms/pseries/lpar.c 			if ((ptes[j].pteh & HPTE_V_VRMA_MASK) ==
ptes              821 arch/powerpc/platforms/pseries/lpar.c 			if (ptes[j].pteh & HPTE_V_VALID)
ptes              823 arch/powerpc/platforms/pseries/lpar.c 					&(ptes[j].pteh), &(ptes[j].ptel));
ptes              910 arch/powerpc/platforms/pseries/lpar.c 	} ptes[4];
ptes              914 arch/powerpc/platforms/pseries/lpar.c 		lpar_rc = plpar_pte_read_4(0, hpte_group, (void *)ptes);
ptes              922 arch/powerpc/platforms/pseries/lpar.c 			if (HPTE_V_COMPARE(ptes[j].pteh, want_v) &&
ptes              923 arch/powerpc/platforms/pseries/lpar.c 			    (ptes[j].pteh & HPTE_V_VALID))
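The pseries callers batch hash-PTE reads: plpar_pte_read_4() issues H_READ with the four-entry flag, the hypervisor returns four consecutive HPTEs in eight registers, and the memcpy above (lines 179/197) copies all 8*sizeof(unsigned long) out at once. Each caller then scans the group of four for a valid, matching entry, as in the ptdump loop at lines 265-269. A condensed sketch of that scan (HPTE_V_VALID and HPTE_V_COMPARE are the kernel's macros; the v/r pair layout follows the copy-out above):

	struct hpte_model { unsigned long v, r; };

	/* Return 1 and copy out v/r if one of the 4 returned HPTEs is a
	 * valid entry matching want_v. */
	static int find_valid_hpte(const struct hpte_model ptes[4],
				   unsigned long want_v,
				   unsigned long *v, unsigned long *r)
	{
		int j;

		for (j = 0; j < 4; j++) {
			if (HPTE_V_COMPARE(ptes[j].v, want_v) &&
			    (ptes[j].v & HPTE_V_VALID)) {
				*v = ptes[j].v;
				*r = ptes[j].r;
				return 1;
			}
		}
		return 0;
	}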
ptes               88 arch/x86/kvm/paging_tmpl.h 	pt_element_t ptes[PT_MAX_FULL_LEVELS];
ptes              236 arch/x86/kvm/paging_tmpl.h 		pte = orig_pte = walker->ptes[level - 1];
ptes              277 arch/x86/kvm/paging_tmpl.h 		walker->ptes[level - 1] = pte;
ptes              408 arch/x86/kvm/paging_tmpl.h 		walker->ptes[walker->level - 1] = pte;
ptes              575 arch/x86/kvm/paging_tmpl.h 	return r || curr_pte != gw->ptes[level - 1];
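In KVM's shadow-paging walker, walker->ptes[] keeps a snapshot of the guest entry read at each level of the walk (lines 277 and 408); line 575 later re-reads the live entry and compares it against the snapshot, so a concurrent guest update at any level invalidates the cached walk. A toy model of the snapshot-then-revalidate idea (the index math and present bit are illustrative, not x86-exact):

	#define MAX_LEVELS 5

	struct walker_model {
		unsigned long ptes[MAX_LEVELS];	/* one snapshot per level */
	};

	static int walk(struct walker_model *w, unsigned long *tables[],
			unsigned long gva, int top_level)
	{
		int level;

		for (level = top_level; level >= 1; level--) {
			unsigned long idx = (gva >> (12 + 9 * (level - 1))) & 0x1ff;
			unsigned long pte = tables[level - 1][idx];

			if (!(pte & 1))			/* not present: walk fails */
				return 0;
			w->ptes[level - 1] = pte;	/* snapshot for later compare */
		}
		return 1;
	}

	/* Stale if the live entry no longer equals the snapshot (cf. line 575). */
	static int gpte_changed(const struct walker_model *w,
				unsigned long curr_pte, int level)
	{
		return curr_pte != w->ptes[level - 1];
	}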
ptes               28 arch/x86/xen/grant-table.c 	pte_t **ptes;
ptes               45 arch/x86/xen/grant-table.c 		set_pte_at(&init_mm, addr, gnttab_shared_vm_area.ptes[i],
ptes               67 arch/x86/xen/grant-table.c 		set_pte_at(&init_mm, addr, gnttab_status_vm_area.ptes[i],
ptes               77 arch/x86/xen/grant-table.c 	pte_t **ptes;
ptes               82 arch/x86/xen/grant-table.c 		ptes = gnttab_status_vm_area.ptes;
ptes               84 arch/x86/xen/grant-table.c 		ptes = gnttab_shared_vm_area.ptes;
ptes               89 arch/x86/xen/grant-table.c 		set_pte_at(&init_mm, addr, ptes[i], __pte(0));
ptes               96 arch/x86/xen/grant-table.c 	area->ptes = kmalloc_array(nr_frames, sizeof(*area->ptes), GFP_KERNEL);
ptes               97 arch/x86/xen/grant-table.c 	if (area->ptes == NULL)
ptes              100 arch/x86/xen/grant-table.c 	area->area = alloc_vm_area(PAGE_SIZE * nr_frames, area->ptes);
ptes              102 arch/x86/xen/grant-table.c 		kfree(area->ptes);
ptes              112 arch/x86/xen/grant-table.c 	kfree(area->ptes);
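The pattern in grant-table.c: alloc_vm_area() reserves kernel virtual address space and hands back one pte_t * per page in area->ptes, so the driver can later install or clear translations with set_pte_at() directly, without re-walking the page tables. Condensed (not verbatim) shape of the lifecycle, assuming the old alloc_vm_area() API declared at vmalloc.h:193:

	/* Reserve VA space and remember each page's PTE slot. */
	area->ptes = kmalloc_array(nr_frames, sizeof(*area->ptes), GFP_KERNEL);
	if (!area->ptes)
		return -ENOMEM;
	area->area = alloc_vm_area(PAGE_SIZE * nr_frames, area->ptes);

	/* Later, map grant-table frame i at its slot (mfn_pte() per Xen): */
	set_pte_at(&init_mm, addr, area->ptes[i],
		   mfn_pte(frames[i], PAGE_KERNEL));

	/* Teardown: write the null PTE back into each slot. */
	set_pte_at(&init_mm, addr, area->ptes[i], __pte(0));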
ptes              338 block/partitions/efi.c 			gpt_header **gpt, gpt_entry **ptes)
ptes              343 block/partitions/efi.c 	if (!ptes)
ptes              432 block/partitions/efi.c 	if (!(*ptes = alloc_read_gpt_entries(state, *gpt)))
ptes              436 block/partitions/efi.c 	crc = efi_crc32((const unsigned char *) (*ptes), pt_size);
ptes              447 block/partitions/efi.c 	kfree(*ptes);
ptes              448 block/partitions/efi.c 	*ptes = NULL;
ptes              584 block/partitions/efi.c 			  gpt_entry **ptes)
ptes              593 block/partitions/efi.c 	if (!ptes)
ptes              633 block/partitions/efi.c                 *ptes = pptes;
ptes              642 block/partitions/efi.c                 *ptes = aptes;
ptes              655 block/partitions/efi.c         *ptes = NULL;
ptes              681 block/partitions/efi.c 	gpt_entry *ptes = NULL;
ptes              685 block/partitions/efi.c 	if (!find_valid_gpt(state, &gpt, &ptes) || !gpt || !ptes) {
ptes              687 block/partitions/efi.c 		kfree(ptes);
ptes              697 block/partitions/efi.c 		u64 start = le64_to_cpu(ptes[i].starting_lba);
ptes              698 block/partitions/efi.c 		u64 size = le64_to_cpu(ptes[i].ending_lba) -
ptes              699 block/partitions/efi.c 			   le64_to_cpu(ptes[i].starting_lba) + 1ULL;
ptes              701 block/partitions/efi.c 		if (!is_pte_valid(&ptes[i], last_lba(state->bdev)))
ptes              707 block/partitions/efi.c 		if (!efi_guidcmp(ptes[i].partition_type_guid, PARTITION_LINUX_RAID_GUID))
ptes              711 block/partitions/efi.c 		efi_guid_to_str(&ptes[i].unique_partition_guid, info->uuid);
ptes              715 block/partitions/efi.c 				ARRAY_SIZE(ptes[i].partition_name));
ptes              718 block/partitions/efi.c 			u8 c = ptes[i].partition_name[label_count] & 0xff;
ptes              726 block/partitions/efi.c 	kfree(ptes);
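In block/partitions/efi.c, ptes abbreviates partition table entries (gpt_entry *), not page-table entries: is_gpt_valid() reads the entry array and verifies its CRC32 against the header (lines 432-436), find_valid_gpt() then settles on the primary or the alternate copy (pptes/aptes, lines 633/642), and the loop from line 697 turns valid entries into kernel partitions. One detail worth noting at lines 697-699: GPT's ending_lba is inclusive, which is why the size computation adds 1:

	/* Size in sectors of one GPT entry; ending_lba is the last sector
	 * of the partition, so the LBA range is inclusive. */
	static u64 gpt_entry_sectors(const gpt_entry *pte)
	{
		return le64_to_cpu(pte->ending_lba) -
		       le64_to_cpu(pte->starting_lba) + 1ULL;
	}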
ptes              276 drivers/gpu/drm/gma500/mmu.c 	uint32_t *ptes;
ptes              292 drivers/gpu/drm/gma500/mmu.c 	ptes = (uint32_t *) v;
ptes              294 drivers/gpu/drm/gma500/mmu.c 		*ptes++ = pd->invalid_pte;
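Here ptes is just a CPU pointer into a kernel mapping (v) of a freshly allocated page of 32-bit GPU PTEs; the loop initializes every slot to the device's invalid-PTE pattern so unmapped GPU addresses fault predictably instead of hitting stale translations. Roughly (loop bounds inferred from the page-sized table):

	ptes = (uint32_t *)v;
	for (i = 0; i < PAGE_SIZE / sizeof(uint32_t); i++)
		*ptes++ = pd->invalid_pte;	/* poison the whole page */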
ptes              198 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		     const struct nvkm_vmm_desc *desc, u32 ptei, u32 ptes)
ptes              209 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	for (lpti = ptei >> sptb; ptes; spti = 0, lpti++) {
ptes              210 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		const u32 pten = min(sptn - spti, ptes);
ptes              212 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		ptes -= pten;
ptes              222 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			for (ptes = 1, ptei++; ptei < lpti; ptes++, ptei++) {
ptes              236 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		for (ptes = 1, ptei++; ptei < lpti; ptes++, ptei++) {
ptes              243 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			TRA(it, "LPTE %05x: U -> S %d PTEs", pteb, ptes);
ptes              244 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			pair->func->sparse(vmm, pgt->pt[0], pteb, ptes);
ptes              251 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			TRA(it, "LPTE %05x: U -> I %d PTEs", pteb, ptes);
ptes              252 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			pair->func->invalid(vmm, pgt->pt[0], pteb, ptes);
ptes              258 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_unref_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes)
ptes              267 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		dma = desc->func->pfn_clear(it->vmm, pgt->pt[type], ptei, ptes);
ptes              272 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			desc->func->pfn_unmap(it->vmm, pgt->pt[type], ptei, ptes);
ptes              277 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	pgt->refs[type] -= ptes;
ptes              281 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		nvkm_vmm_unref_sptes(it, pgt, desc, ptei, ptes);
ptes              297 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		   const struct nvkm_vmm_desc *desc, u32 ptei, u32 ptes)
ptes              308 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	for (lpti = ptei >> sptb; ptes; spti = 0, lpti++) {
ptes              309 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		const u32 pten = min(sptn - spti, ptes);
ptes              311 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		ptes -= pten;
ptes              321 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			for (ptes = 1, ptei++; ptei < lpti; ptes++, ptei++) {
ptes              335 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		for (ptes = 1, ptei++; ptei < lpti; ptes++, ptei++) {
ptes              343 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			const u32 sptc = ptes * sptn;
ptes              350 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			TRA(it, "LPTE %05x: S -> U %d PTEs", pteb, ptes);
ptes              351 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			pair->func->unmap(vmm, pgt->pt[0], pteb, ptes);
ptes              357 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			TRA(it, "LPTE %05x: I -> U %d PTEs", pteb, ptes);
ptes              358 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			pair->func->unmap(vmm, pgt->pt[0], pteb, ptes);
ptes              364 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_ref_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes)
ptes              371 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	pgt->refs[type] += ptes;
ptes              375 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		nvkm_vmm_ref_sptes(it, pgt, desc, ptei, ptes);
ptes              382 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		     struct nvkm_vmm_pt *pgt, u32 ptei, u32 ptes)
ptes              385 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		while (ptes--)
ptes              389 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		memset(&pgt->pte[ptei], NVKM_VMM_PTE_SPARSE, ptes);
ptes              394 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_sparse_unref_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes)
ptes              398 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		memset(&pt->pde[ptei], 0x00, sizeof(pt->pde[0]) * ptes);
ptes              401 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		memset(&pt->pte[ptei], 0x00, sizeof(pt->pte[0]) * ptes);
ptes              402 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	return nvkm_vmm_unref_ptes(it, pfn, ptei, ptes);
ptes              406 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_sparse_ref_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes)
ptes              408 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	nvkm_vmm_sparse_ptes(it->desc, it->pt[0], ptei, ptes);
ptes              409 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	return nvkm_vmm_ref_ptes(it, pfn, ptei, ptes);
ptes              423 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	u32 pteb, ptei, ptes;
ptes              449 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			for (ptes = 1, ptei++; ptei < pten; ptes++, ptei++) {
ptes              457 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 					desc->func->sparse(vmm, pt, pteb, ptes);
ptes              459 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 					desc->func->invalid(vmm, pt, pteb, ptes);
ptes              460 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 				memset(&pgt->pte[pteb], 0x00, ptes);
ptes              462 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 				desc->func->unmap(vmm, pt, pteb, ptes);
ptes              463 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 				while (ptes--)
ptes              536 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		const u32 ptes = min_t(u64, it.cnt, pten - ptei);
ptes              563 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		if (!REF_PTES || REF_PTES(&it, pfn, ptei, ptes)) {
ptes              567 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 					MAP_PTES(vmm, pt, ptei, ptes, map);
ptes              569 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 					CLR_PTES(vmm, pt, ptei, ptes);
ptes              575 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		it.pte[it.lvl] += ptes;
ptes              576 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		it.cnt -= ptes;
ptes             1806 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_boot_ptes(struct nvkm_vmm_iter *it, bool pfn, u32 ptei, u32 ptes)
ptes               54 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h 				  struct nvkm_mmu_pt *, u32 ptei, u32 ptes);
ptes               58 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h 				  u32 ptei, u32 ptes, struct nvkm_vmm_map *);
ptes               72 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h 	bool (*pfn_clear)(struct nvkm_vmm *, struct nvkm_mmu_pt *, u32 ptei, u32 ptes);
ptes               33 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 		  u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)
ptes               39 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 		while (ptes--) {
ptes               48 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 		map->type += ptes * map->ctag;
ptes               50 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 		while (ptes--) {
ptes               59 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 		  u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
ptes               61 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 	VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte);
ptes               66 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 		  u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
ptes               69 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 		VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes);
ptes               71 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 		while (ptes--) {
ptes               80 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 	VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte);
ptes               85 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 		  u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
ptes               87 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 	VMM_MAP_ITER_MEM(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte);
ptes               92 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 		    struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
ptes               94 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 	VMM_FO064(pt, vmm, ptei * 8, 0ULL, ptes);
ptes               26 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgk104.c 		      struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
ptes               29 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgk104.c 	VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(1) /* PRIV. */, ptes);
ptes               29 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgm200.c 		     struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
ptes               32 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgm200.c 	VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(32) /* VOL. */, ptes);
ptes               35 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 		    struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
ptes               41 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	while (ptes--) {
ptes               56 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 		    struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
ptes               60 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	while (ptes--) {
ptes               76 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 		  u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
ptes               82 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	while (ptes--) {
ptes              110 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 		  u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)
ptes              114 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	map->type += ptes * map->ctag;
ptes              116 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	while (ptes--) {
ptes              124 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 		  u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
ptes              126 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gp100_vmm_pgt_pte);
ptes              131 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 		  u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
ptes              134 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 		VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes);
ptes              136 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 		while (ptes--) {
ptes              145 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gp100_vmm_pgt_pte);
ptes              150 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 		  u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
ptes              152 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	VMM_MAP_ITER_MEM(vmm, pt, ptei, ptes, map, gp100_vmm_pgt_pte);
ptes              157 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 		     struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
ptes              160 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(3) /* VOL. */, ptes);
ptes              177 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 		      struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
ptes              180 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(5) /* PRIV. */, ptes);
ptes              193 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 		  u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)
ptes              197 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	map->type += ptes * map->ctag;
ptes              199 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	while (ptes--) {
ptes              207 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 		  u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
ptes              209 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	VMM_MAP_ITER_MEM(vmm, pt, ptei, ptes, map, gp100_vmm_pd0_pte);
ptes               29 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c 		 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)
ptes               32 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c 	while (ptes--) {
ptes               40 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c 		 u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
ptes               42 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c 	VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte);
ptes               47 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c 		 u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
ptes               51 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c 	while (ptes--)
ptes               55 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c 	VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte);
ptes               61 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c 		   struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
ptes               63 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c 	VMM_FO032(pt, vmm, 8 + (ptei * 4), 0, ptes);
ptes               28 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c 		 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)
ptes               31 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c 	while (ptes--) {
ptes               39 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c 		 u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
ptes               41 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c 	VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte);
ptes               46 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c 		 u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
ptes               50 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c 	while (ptes--) {
ptes               56 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c 	VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte);
ptes               62 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c 		   struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
ptes               64 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c 	VMM_FO032(pt, vmm, ptei * 4, 0, ptes);
ptes               28 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		  dma_addr_t *list, u32 ptei, u32 ptes)
ptes               38 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 	while (ptes--) {
ptes               74 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)
ptes               79 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		const u32 pten = min(ptes, 4 - (ptei & 3));
ptes               84 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		ptes -= pten;
ptes               87 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 	while (ptes >= 4) {
ptes               94 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		ptes -= 4;
ptes               97 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 	if (ptes) {
ptes               98 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		for (i = 0; i < ptes; i++, addr += 0x1000)
ptes              100 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		nv44_vmm_pgt_fill(vmm, pt, tmp, ptei, ptes);
ptes              106 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		 u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
ptes              108 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 	VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv44_vmm_pgt_pte);
ptes              113 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		 u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
ptes              118 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		const u32 pten = min(ptes, 4 - (ptei & 3));
ptes              121 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		ptes -= pten;
ptes              125 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 	while (ptes >= 4) {
ptes              133 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		ptes -= 4;
ptes              136 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 	if (ptes) {
ptes              137 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		nv44_vmm_pgt_fill(vmm, pt, map->dma, ptei, ptes);
ptes              138 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		map->dma += ptes;
ptes              142 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 	VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv44_vmm_pgt_pte);
ptes              148 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		   struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
ptes              152 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		const u32 pten = min(ptes, 4 - (ptei & 3));
ptes              155 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		ptes -= pten;
ptes              158 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 	while (ptes > 4) {
ptes              163 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		ptes -= 4;
ptes              166 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 	if (ptes)
ptes              167 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c 		nv44_vmm_pgt_fill(vmm, pt, NULL, ptei, ptes);
ptes               33 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 		 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr)
ptes               39 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 	map->type += ptes * map->ctag;
ptes               41 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 	while (ptes) {
ptes               44 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 			if (ptes >= pten && IS_ALIGNED(ptei, pten))
ptes               50 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 		ptes -= pten;
ptes               59 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 		 u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
ptes               61 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 	VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte);
ptes               66 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 		 u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
ptes               69 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 		VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes);
ptes               71 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 		while (ptes--) {
ptes               80 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 	VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte);
ptes               85 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 		 u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
ptes               87 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 	VMM_MAP_ITER_MEM(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte);
ptes               92 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 		   struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)
ptes               94 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 	VMM_FO064(pt, vmm, ptei * 8, 0ULL, ptes);
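Throughout nouveau's nvkm/mmu code, ptei and ptes are an (index, count) pair rather than a pointer: every backend hook (the MAP_PTES/CLR_PTES calls, the VMM_FO032/VMM_FO064 fill macros) touches ptes entries starting at index ptei within a single page table, and the iterator clamps the span to what remains in the current table before each call (vmm.c:536, min_t(u64, it.cnt, pten - ptei)). A minimal model of the convention (table layout illustrative):

	/* Nouveau-style fill hook: write 'value' into 'ptes' consecutive
	 * 64-bit entries starting at index 'ptei'. */
	static void pgt_fill(u64 *table, u32 ptei, u32 ptes, u64 value)
	{
		while (ptes--)
			table[ptei++] = value;
	}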
ptes              302 drivers/staging/gasket/gasket_page_table.c static bool gasket_is_pte_range_free(struct gasket_page_table_entry *ptes,
ptes              308 drivers/staging/gasket/gasket_page_table.c 		if (ptes[i].status != PTE_FREE)
ptes              466 drivers/staging/gasket/gasket_page_table.c 				  struct gasket_page_table_entry *ptes,
ptes              484 drivers/staging/gasket/gasket_page_table.c 			ptes[i].page = NULL;
ptes              485 drivers/staging/gasket/gasket_page_table.c 			ptes[i].offset = offset;
ptes              486 drivers/staging/gasket/gasket_page_table.c 			ptes[i].dma_addr = pg_tbl->coherent_pages[0].paddr +
ptes              500 drivers/staging/gasket/gasket_page_table.c 			ptes[i].page = page;
ptes              501 drivers/staging/gasket/gasket_page_table.c 			ptes[i].offset = offset;
ptes              504 drivers/staging/gasket/gasket_page_table.c 			ptes[i].dma_addr =
ptes              509 drivers/staging/gasket/gasket_page_table.c 					      ptes[i].dma_addr)) {
ptes              510 drivers/staging/gasket/gasket_page_table.c 				if (gasket_release_page(ptes[i].page))
ptes              513 drivers/staging/gasket/gasket_page_table.c 				memset(&ptes[i], 0,
ptes              520 drivers/staging/gasket/gasket_page_table.c 		dma_addr = (ptes[i].dma_addr + offset) | GASKET_VALID_SLOT_FLAG;
ptes              533 drivers/staging/gasket/gasket_page_table.c 		ptes[i].status = PTE_INUSE;
ptes              591 drivers/staging/gasket/gasket_page_table.c 				     struct gasket_page_table_entry *ptes,
ptes              602 drivers/staging/gasket/gasket_page_table.c 		if (is_simple_mapping || ptes[i].status == PTE_INUSE) {
ptes              611 drivers/staging/gasket/gasket_page_table.c 		if (ptes[i].status == PTE_INUSE) {
ptes              612 drivers/staging/gasket/gasket_page_table.c 			if (ptes[i].page && ptes[i].dma_addr) {
ptes              613 drivers/staging/gasket/gasket_page_table.c 				dma_unmap_page(pg_tbl->device, ptes[i].dma_addr,
ptes              616 drivers/staging/gasket/gasket_page_table.c 			if (gasket_release_page(ptes[i].page))
ptes              621 drivers/staging/gasket/gasket_page_table.c 		memset(&ptes[i], 0, sizeof(struct gasket_page_table_entry));
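The gasket driver keeps one software PTE per device-visible page; the fields touched above imply roughly this shape (simplified; the real struct may carry more state):

	struct gasket_pte_model {
		int status;		/* PTE_FREE or PTE_INUSE; the memset-to-zero
					 * teardown suggests PTE_FREE is 0 */
		struct page *page;	/* NULL when backed by the coherent pool */
		unsigned long offset;	/* sub-page offset of the user buffer */
		dma_addr_t dma_addr;	/* bus address programmed into the device */
	};

Mapping writes (dma_addr + offset) | GASKET_VALID_SLOT_FLAG into the hardware slot and marks the entry PTE_INUSE (lines 520/533); unmapping dma_unmap_page()s and releases the page, then memset()s the entry back to zero (lines 601-621).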
ptes              683 drivers/xen/xenbus/xenbus_client.c 	pte_t *ptes[XENBUS_MAX_RING_GRANTS];
ptes              698 drivers/xen/xenbus/xenbus_client.c 	area = alloc_vm_area(XEN_PAGE_SIZE * nr_grefs, ptes);
ptes              705 drivers/xen/xenbus/xenbus_client.c 		phys_addrs[i] = arbitrary_virt_to_machine(ptes[i]).maddr;
ptes              303 include/linux/swap.h 	pte_t *ptes;
ptes              305 include/linux/swap.h 	pte_t ptes[SWAP_RA_PTE_CACHE_SIZE];
ptes              193 include/linux/vmalloc.h extern struct vm_struct *alloc_vm_area(size_t size, pte_t **ptes);
ptes              373 mm/nommu.c     struct vm_struct *alloc_vm_area(size_t size, pte_t **ptes)
ptes              699 mm/swap_state.c 	ra_info->ptes = pte;
ptes              701 mm/swap_state.c 	tpte = ra_info->ptes;
ptes              739 mm/swap_state.c 	for (i = 0, pte = ra_info.ptes; i < ra_info.nr_pte;
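The two swap.h declarations (lines 303 and 305) are the 64-bit and 32-bit arms of an #ifdef, not duplicates: a 64-bit kernel stores a pointer straight into the faulting page table (swap_state.c:699), while a 32-bit kernel, where the PTE page may only be transiently mapped, copies the readahead window into a small on-struct cache (the tpte copy starting at line 701). Sketch of the implied layout (surrounding fields assumed):

	struct vma_swap_readahead_model {
		unsigned short nr_pte;
	#ifdef CONFIG_64BIT
		pte_t *ptes;				/* points into the page table */
	#else
		pte_t ptes[SWAP_RA_PTE_CACHE_SIZE];	/* copied window */
	#endif
	};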
ptes             3102 mm/vmalloc.c   struct vm_struct *alloc_vm_area(size_t size, pte_t **ptes)
ptes             3116 mm/vmalloc.c   				size, f, ptes ? &ptes : NULL)) {
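alloc_vm_area() pre-populates the area's page tables and, when given a ptes array, records a pte_t * for each page as a side effect: the ptes ? &ptes : NULL argument at line 3116 hands apply_to_page_range()'s callback a pte_t*** cursor that advances one slot per PTE visited (NULL when the caller does not want the slots). From memory, the helper f is essentially:

	static int f(pte_t *pte, unsigned long addr, void *data)
	{
		pte_t ***p = data;

		if (p) {
			*(*p) = pte;	/* record this PTE slot ... */
			(*p)++;		/* ... and advance the caller's cursor */
		}
		return 0;
	}

The mm/nommu.c definition at line 373 is the stub that lets the symbol resolve on !MMU kernels, where there are no PTEs to hand out.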