Lines matching refs:vb — cross-reference listing of every line referencing the identifier `vb` (apparently excerpted from the kernel's vmalloc/vmap-block code; each entry shows the original line number, the matching line, and the enclosing function)

819 	struct vmap_block *vb;  in new_vmap_block()  local
827 vb = kmalloc_node(sizeof(struct vmap_block), in new_vmap_block()
829 if (unlikely(!vb)) in new_vmap_block()
836 kfree(vb); in new_vmap_block()
842 kfree(vb); in new_vmap_block()
848 spin_lock_init(&vb->lock); in new_vmap_block()
849 vb->va = va; in new_vmap_block()
852 vb->free = VMAP_BBMAP_BITS - (1UL << order); in new_vmap_block()
853 vb->dirty = 0; in new_vmap_block()
854 vb->dirty_min = VMAP_BBMAP_BITS; in new_vmap_block()
855 vb->dirty_max = 0; in new_vmap_block()
856 INIT_LIST_HEAD(&vb->free_list); in new_vmap_block()
860 err = radix_tree_insert(&vmap_block_tree, vb_idx, vb); in new_vmap_block()
867 list_add_tail_rcu(&vb->free_list, &vbq->free); in new_vmap_block()
874 static void free_vmap_block(struct vmap_block *vb) in free_vmap_block() argument
879 vb_idx = addr_to_vb_idx(vb->va->va_start); in free_vmap_block()
883 BUG_ON(tmp != vb); in free_vmap_block()
885 free_vmap_area_noflush(vb->va); in free_vmap_block()
886 kfree_rcu(vb, rcu_head); in free_vmap_block()
892 struct vmap_block *vb; in purge_fragmented_blocks() local
897 list_for_each_entry_rcu(vb, &vbq->free, free_list) { in purge_fragmented_blocks()
899 if (!(vb->free + vb->dirty == VMAP_BBMAP_BITS && vb->dirty != VMAP_BBMAP_BITS)) in purge_fragmented_blocks()
902 spin_lock(&vb->lock); in purge_fragmented_blocks()
903 if (vb->free + vb->dirty == VMAP_BBMAP_BITS && vb->dirty != VMAP_BBMAP_BITS) { in purge_fragmented_blocks()
904 vb->free = 0; /* prevent further allocs after releasing lock */ in purge_fragmented_blocks()
905 vb->dirty = VMAP_BBMAP_BITS; /* prevent purging it again */ in purge_fragmented_blocks()
906 vb->dirty_min = 0; in purge_fragmented_blocks()
907 vb->dirty_max = VMAP_BBMAP_BITS; in purge_fragmented_blocks()
909 list_del_rcu(&vb->free_list); in purge_fragmented_blocks()
911 spin_unlock(&vb->lock); in purge_fragmented_blocks()
912 list_add_tail(&vb->purge, &purge); in purge_fragmented_blocks()
914 spin_unlock(&vb->lock); in purge_fragmented_blocks()
918 list_for_each_entry_safe(vb, n_vb, &purge, purge) { in purge_fragmented_blocks()
919 list_del(&vb->purge); in purge_fragmented_blocks()
920 free_vmap_block(vb); in purge_fragmented_blocks()
935 struct vmap_block *vb; in vb_alloc() local
953 list_for_each_entry_rcu(vb, &vbq->free, free_list) { in vb_alloc()
956 spin_lock(&vb->lock); in vb_alloc()
957 if (vb->free < (1UL << order)) { in vb_alloc()
958 spin_unlock(&vb->lock); in vb_alloc()
962 pages_off = VMAP_BBMAP_BITS - vb->free; in vb_alloc()
963 vaddr = vmap_block_vaddr(vb->va->va_start, pages_off); in vb_alloc()
964 vb->free -= 1UL << order; in vb_alloc()
965 if (vb->free == 0) { in vb_alloc()
967 list_del_rcu(&vb->free_list); in vb_alloc()
971 spin_unlock(&vb->lock); in vb_alloc()
990 struct vmap_block *vb; in vb_free() local
1004 vb = radix_tree_lookup(&vmap_block_tree, vb_idx); in vb_free()
1006 BUG_ON(!vb); in vb_free()
1010 spin_lock(&vb->lock); in vb_free()
1013 vb->dirty_min = min(vb->dirty_min, offset); in vb_free()
1014 vb->dirty_max = max(vb->dirty_max, offset + (1UL << order)); in vb_free()
1016 vb->dirty += 1UL << order; in vb_free()
1017 if (vb->dirty == VMAP_BBMAP_BITS) { in vb_free()
1018 BUG_ON(vb->free); in vb_free()
1019 spin_unlock(&vb->lock); in vb_free()
1020 free_vmap_block(vb); in vb_free()
1022 spin_unlock(&vb->lock); in vb_free()
1049 struct vmap_block *vb; in vm_unmap_aliases() local
1052 list_for_each_entry_rcu(vb, &vbq->free, free_list) { in vm_unmap_aliases()
1053 spin_lock(&vb->lock); in vm_unmap_aliases()
1054 if (vb->dirty) { in vm_unmap_aliases()
1055 unsigned long va_start = vb->va->va_start; in vm_unmap_aliases()
1058 s = va_start + (vb->dirty_min << PAGE_SHIFT); in vm_unmap_aliases()
1059 e = va_start + (vb->dirty_max << PAGE_SHIFT); in vm_unmap_aliases()
1066 spin_unlock(&vb->lock); in vm_unmap_aliases()