Lines Matching refs:PAGE_SHIFT
335 # define mapped_space_bits (3*(PAGE_SHIFT - pte_bits) + PAGE_SHIFT) in ia64_mmu_init()
343 # define vmlpt_bits (impl_va_bits - PAGE_SHIFT + pte_bits) in ia64_mmu_init()
356 if ((mapped_space_bits - PAGE_SHIFT > vmlpt_bits - pte_bits) || in ia64_mmu_init()
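The three ia64_mmu_init() lines above size the virtually mapped linear page table (vmlpt) against the span a three-level page table can reach: each level resolves PAGE_SHIFT - pte_bits address bits and the page offset contributes the final PAGE_SHIFT bits. The sketch below reproduces that arithmetic in user space; the concrete values (16 KB pages, 8-byte PTEs so pte_bits = 3, 50 implemented VA bits) are assumptions for illustration, and line 356 is only the first term of the kernel's full sanity check.

/*
 * Standalone sketch (not kernel code) of the sizing arithmetic in
 * ia64_mmu_init().  The values below are assumptions for illustration;
 * the real ones come from the kernel configuration and the CPU.
 */
#include <stdio.h>

#define PAGE_SHIFT  14UL    /* assumed: 16 KB pages */

int main(void)
{
    unsigned long pte_bits = 3;         /* assumed: 8-byte PTEs */
    unsigned long impl_va_bits = 50;    /* assumed implemented VA bits */

    /* log2 of the space a three-level page table can map */
    unsigned long mapped_space_bits = 3 * (PAGE_SHIFT - pte_bits) + PAGE_SHIFT;

    /* log2 of the linear page table covering impl_va_bits of VA */
    unsigned long vmlpt_bits = impl_va_bits - PAGE_SHIFT + pte_bits;

    printf("mapped_space_bits=%lu vmlpt_bits=%lu\n",
           mapped_space_bits, vmlpt_bits);

    /* first term of the check on line 356 */
    if (mapped_space_bits - PAGE_SHIFT > vmlpt_bits - pte_bits)
        printf("vmlpt cannot cover the mapped space\n");
    else
        printf("vmlpt covers the mapped space\n");
    return 0;
}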
447 map_start = vmem_map + (__pa(start) >> PAGE_SHIFT); in create_mem_map_page_table()
448 map_end = vmem_map + (__pa(end) >> PAGE_SHIFT); in create_mem_map_page_table()
469 set_pte(pte, pfn_pte(__pa(alloc_bootmem_pages_node(NODE_DATA(node), PAGE_SIZE)) >> PAGE_SHIFT, in create_mem_map_page_table()
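In create_mem_map_page_table() the virtual memmap is indexed by page frame number: __pa(addr) >> PAGE_SHIFT selects the struct page slot for a physical address, and line 469 backs the needed memmap pages with bootmem allocations, converting each fresh page's physical address back into a pfn for pfn_pte(). A rough user-space sketch of the slot arithmetic follows; PAGE_SHIFT = 14, the 64-byte struct page and the physical range are all assumptions.

/* Sketch of the vmem_map slot arithmetic (not kernel code). */
#include <stdio.h>

#define PAGE_SHIFT        14UL    /* assumed: 16 KB pages */
#define STRUCT_PAGE_SIZE  64UL    /* assumed sizeof(struct page) */

int main(void)
{
    /* hypothetical physical range handed to the callback */
    unsigned long pa_start = 0x04000000UL;    /*  64 MB */
    unsigned long pa_end   = 0x08000000UL;    /* 128 MB */

    /* one struct page slot per frame, indexed by pfn */
    unsigned long slot_start = pa_start >> PAGE_SHIFT;
    unsigned long slot_end   = pa_end   >> PAGE_SHIFT;

    /* memmap bytes that lines 447-469 must make sure are mapped */
    unsigned long memmap_bytes = (slot_end - slot_start) * STRUCT_PAGE_SIZE;

    printf("pfn %lu..%lu: %lu struct page slots, ~%lu KB of memmap\n",
           slot_start, slot_end, slot_end - slot_start, memmap_bytes >> 10);
    return 0;
}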
489 map_start = vmem_map + (__pa(start) >> PAGE_SHIFT); in virtual_memmap_init()
490 map_end = vmem_map + (__pa(end) >> PAGE_SHIFT); in virtual_memmap_init()
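virtual_memmap_init() (lines 489-490) repeats the same vmem_map + pfn offset computation to locate the struct page range it has to initialize for a given physical span; the arithmetic matches the sketch above.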
582 pfn_start = (PAGE_ALIGN(__pa(start))) >> PAGE_SHIFT; in find_max_min_low_pfn()
583 pfn_end = (PAGE_ALIGN(__pa(end - 1))) >> PAGE_SHIFT; in find_max_min_low_pfn()
585 pfn_start = GRANULEROUNDDOWN(__pa(start)) >> PAGE_SHIFT; in find_max_min_low_pfn()
586 pfn_end = GRANULEROUNDUP(__pa(end - 1)) >> PAGE_SHIFT; in find_max_min_low_pfn()
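find_max_min_low_pfn() derives pfn bounds with two rounding policies: lines 582-583 (the flat-memmap branch) round physical addresses to page boundaries, while lines 585-586 widen the range to IA-64 granule boundaries. The sketch below shows both; the 16 KB page size and 16 MB granule are assumed, configuration-dependent values, and the alignment macros are re-stated from their usual meaning rather than copied from the kernel headers.

/* Sketch of the two rounding policies (not kernel code). */
#include <stdio.h>

#define PAGE_SHIFT       14UL                  /* assumed: 16 KB pages */
#define PAGE_SIZE        (1UL << PAGE_SHIFT)
#define PAGE_ALIGN(a)    (((a) + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1))

#define GRANULE_SHIFT    24UL                  /* assumed: 16 MB granule */
#define GRANULE_SIZE     (1UL << GRANULE_SHIFT)
#define GRANULE_DOWN(a)  ((a) & ~(GRANULE_SIZE - 1))
#define GRANULE_UP(a)    (((a) + GRANULE_SIZE - 1) & ~(GRANULE_SIZE - 1))

int main(void)
{
    /* hypothetical, deliberately unaligned physical range */
    unsigned long pa_start = 0x04003000UL;
    unsigned long pa_end   = 0x07ffd000UL;

    /* flat memmap: page-granular bounds (lines 582-583) */
    printf("page granular:    pfn %lu..%lu\n",
           PAGE_ALIGN(pa_start) >> PAGE_SHIFT,
           PAGE_ALIGN(pa_end - 1) >> PAGE_SHIFT);

    /* virtual memmap: granule-granular bounds (lines 585-586) */
    printf("granule granular: pfn %lu..%lu\n",
           GRANULE_DOWN(pa_start) >> PAGE_SHIFT,
           GRANULE_UP(pa_end - 1) >> PAGE_SHIFT);
    return 0;
}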
659 unsigned long start_pfn = start >> PAGE_SHIFT; in arch_add_memory()
660 unsigned long nr_pages = size >> PAGE_SHIFT; in arch_add_memory()
679 unsigned long start_pfn = start >> PAGE_SHIFT; in arch_remove_memory()
680 unsigned long nr_pages = size >> PAGE_SHIFT; in arch_remove_memory()
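The hotplug entry points at the end of the listing convert a byte-addressed (start, size) request into the frame-based quantities the core mm works with: both arch_add_memory() and arch_remove_memory() shift by PAGE_SHIFT to obtain the starting pfn and the page count before handing them on to the generic hotplug helpers. A trivial sketch, with assumed values:

/*
 * Sketch (not kernel code) of the pfn conversion in arch_add_memory() /
 * arch_remove_memory().  PAGE_SHIFT and the request values are assumptions.
 */
#include <stdio.h>

#define PAGE_SHIFT  14UL    /* assumed: 16 KB pages */

int main(void)
{
    unsigned long start = 0x40000000UL;    /* hypothetical hotplug base (1 GB) */
    unsigned long size  = 0x10000000UL;    /* hypothetical size (256 MB) */

    /* the byte-based request becomes a frame-based one */
    unsigned long start_pfn = start >> PAGE_SHIFT;
    unsigned long nr_pages  = size >> PAGE_SHIFT;

    printf("start_pfn=%lu nr_pages=%lu\n", start_pfn, nr_pages);
    return 0;
}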