last_offset       230 drivers/android/binder_alloc_selftest.c 	size_t last_offset, offset = 0;
last_offset       233 drivers/android/binder_alloc_selftest.c 		last_offset = offset;
last_offset       235 drivers/android/binder_alloc_selftest.c 		front_sizes[i] = offset - last_offset;
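
A note on the binder selftest lines above: the loop keeps a running offset, saves it in last_offset at the top of each iteration, and derives the buffer's front size from how far the offset then advanced. A minimal sketch of that running-offset/delta pattern follows; the end offsets and the 8-byte alignment are illustrative, not the selftest's actual values.

    #include <stdio.h>
    #include <stddef.h>

    #define ALIGN_UP(x, a)  (((x) + (a) - 1) & ~((size_t)(a) - 1))

    int main(void)
    {
        /* Illustrative end offsets of consecutive buffers in one page. */
        size_t ends[] = { 100, 500, 2000, 3500 };
        size_t sizes[4];
        size_t last_offset, offset = 0;

        for (int i = 0; i < 4; i++) {
            last_offset = offset;
            offset = ALIGN_UP(ends[i], 8);      /* running offset advances */
            sizes[i] = offset - last_offset;    /* size = distance advanced */
            printf("buffer %d: size %zu\n", i, sizes[i]);
        }
        return 0;
    }
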
last_offset       276 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 	u32 i, last_offset = 0;
last_offset       304 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 				(off < last_offset)) {
last_offset       320 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 		last_offset = off;
last_offset       305 drivers/gpu/drm/msm/msm_gem_submit.c 	uint32_t i, last_offset = 0;
last_offset       352 drivers/gpu/drm/msm/msm_gem_submit.c 				(off < last_offset)) {
last_offset       374 drivers/gpu/drm/msm/msm_gem_submit.c 		last_offset = off;
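
Both submit paths above (etnaviv and msm) reject a relocation whose offset is smaller than the previous one: userspace must hand in relocations sorted by submit offset so the kernel can patch the command stream in a single forward pass. Below is a hedged sketch of that validation loop; the struct, field names and the word-alignment check are illustrative rather than the drivers' exact code.

    #include <errno.h>
    #include <stdint.h>
    #include <stdio.h>

    struct reloc {
        uint32_t submit_offset;     /* illustrative: offset to patch */
    };

    /* Return 0 if relocations are sorted by offset, -EINVAL otherwise. */
    static int check_relocs(const struct reloc *relocs, unsigned int nr)
    {
        uint32_t last_offset = 0;

        for (unsigned int i = 0; i < nr; i++) {
            uint32_t off = relocs[i].submit_offset;

            if (off % 4 || off < last_offset) {
                fprintf(stderr, "invalid offset %u at reloc %u\n", off, i);
                return -EINVAL;
            }
            last_offset = off;
        }
        return 0;
    }

    int main(void)
    {
        struct reloc relocs[] = { { 8 }, { 16 }, { 12 } };  /* 12 is out of order */

        return check_relocs(relocs, 3) ? 1 : 0;
    }
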
last_offset      3034 drivers/md/dm-integrity.c 		sector_t last_sector, last_area, last_offset;
last_offset      3043 drivers/md/dm-integrity.c 		get_area_and_offset(ic, ic->provided_data_sectors - 1, &last_area, &last_offset);
last_offset      3044 drivers/md/dm-integrity.c 		last_sector = get_data_sector(ic, last_area, last_offset);
last_offset       415 drivers/net/ethernet/hisilicon/hns/hns_enet.c 	int last_offset;
last_offset       428 drivers/net/ethernet/hisilicon/hns/hns_enet.c 		last_offset = hnae_page_size(ring) - hnae_buf_size(ring);
last_offset       454 drivers/net/ethernet/hisilicon/hns/hns_enet.c 	if (desc_cb->page_offset <= last_offset) {
last_offset      3812 drivers/net/ethernet/intel/i40e/i40e_ethtool.c 	u16 last_offset = 0;
last_offset      3864 drivers/net/ethernet/intel/i40e/i40e_ethtool.c 		last_offset = list_prev_entry(entry, list)->src_offset + 1;
last_offset      3866 drivers/net/ethernet/intel/i40e/i40e_ethtool.c 	for (; i < 3; i++, last_offset++) {
last_offset      3871 drivers/net/ethernet/intel/i40e/i40e_ethtool.c 						     last_offset));
last_offset       561 drivers/net/ethernet/intel/ice/ice_txrx.c 	unsigned int last_offset = PAGE_SIZE - ICE_RXBUF_2048;
last_offset       575 drivers/net/ethernet/intel/ice/ice_txrx.c 	if (rx_buf->page_offset > last_offset)
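
Both RX paths above (hns_enet and ice_txrx) compute last_offset as the page size minus one RX buffer and keep recycling the page only while the current buffer offset has not moved past that point; beyond it another buffer no longer fits and the page has to be released and replaced. A simplified sketch of that bound check is below; the sizes are illustrative, and the real drivers apply additional reuse conditions as well.

    #include <stdbool.h>
    #include <stdio.h>

    #define PAGE_SIZE   4096u
    #define RX_BUF_SIZE 2048u   /* illustrative: half-page RX buffers */

    struct rx_buf {
        unsigned int page_offset;   /* where the next buffer would start */
    };

    /* The page can be reused only if another whole buffer still fits in it. */
    static bool can_reuse_page(const struct rx_buf *buf)
    {
        unsigned int last_offset = PAGE_SIZE - RX_BUF_SIZE;

        return buf->page_offset <= last_offset;
    }

    int main(void)
    {
        struct rx_buf a = { .page_offset = 2048 };  /* second half still free */
        struct rx_buf b = { .page_offset = 4096 };  /* page exhausted */

        printf("%d %d\n", can_reuse_page(&a), can_reuse_page(&b));
        return 0;
    }
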
last_offset       554 drivers/net/ethernet/mellanox/mlx4/qp.c 	u32 last_offset;
last_offset       607 drivers/net/ethernet/mellanox/mlx4/qp.c 	last_offset = dev->caps.reserved_qps_cnt[MLX4_QP_REGION_FW];
last_offset       631 drivers/net/ethernet/mellanox/mlx4/qp.c 		if (((last_offset & MLX4_BF_QP_SKIP_MASK) &&
last_offset       632 drivers/net/ethernet/mellanox/mlx4/qp.c 		     ((int)(max_table_offset - last_offset)) >=
last_offset       634 drivers/net/ethernet/mellanox/mlx4/qp.c 		    (!(last_offset & MLX4_BF_QP_SKIP_MASK) &&
last_offset       635 drivers/net/ethernet/mellanox/mlx4/qp.c 		     !((last_offset + requested_size - 1) &
last_offset       640 drivers/net/ethernet/mellanox/mlx4/qp.c 				(last_offset | MLX4_BF_QP_SKIP_MASK | bf_mask) + 1;
last_offset       642 drivers/net/ethernet/mellanox/mlx4/qp.c 			if (last_offset & MLX4_BF_QP_SKIP_MASK)
last_offset       643 drivers/net/ethernet/mellanox/mlx4/qp.c 				last_offset = candidate_offset;
last_offset       647 drivers/net/ethernet/mellanox/mlx4/qp.c 			if (last_offset > max_table_offset) {
last_offset       651 drivers/net/ethernet/mellanox/mlx4/qp.c 				size = min3(max_table_offset - last_offset,
last_offset       652 drivers/net/ethernet/mellanox/mlx4/qp.c 					    bf_mask - (last_offset & bf_mask),
last_offset       659 drivers/net/ethernet/mellanox/mlx4/qp.c 						bf_mask - (last_offset & bf_mask),
last_offset       666 drivers/net/ethernet/mellanox/mlx4/qp.c 						last_offset = candidate_offset;
last_offset       688 drivers/net/ethernet/mellanox/mlx4/qp.c 			last_offset = offset + size;
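
One detail worth calling out in the mlx4 lines above: the candidate offset is advanced with (last_offset | MLX4_BF_QP_SKIP_MASK | bf_mask) + 1. For a power-of-two alignment, OR-ing in the low bits and adding one is the usual idiom for "the next aligned value strictly greater than last_offset". A small standalone demonstration of the idiom, with an illustrative mask value:

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        uint32_t bf_mask = 0x7;     /* illustrative: alignment of 8 */

        /* ORing in the low bits and adding 1 rounds up to the next
         * multiple of (bf_mask + 1) strictly above the input. */
        for (uint32_t last_offset = 0; last_offset <= 10; last_offset++)
            printf("%2u -> %2u\n", last_offset, (last_offset | bf_mask) + 1);
        return 0;
    }
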
last_offset      1016 drivers/net/wireless/mediatek/mt7601u/phy.c __mt7601u_phy_freq_cal(struct mt7601u_dev *dev, s8 last_offset, u8 phy_mode)
last_offset      1020 drivers/net/wireless/mediatek/mt7601u/phy.c 	trace_freq_cal_offset(dev, phy_mode, last_offset);
last_offset      1023 drivers/net/wireless/mediatek/mt7601u/phy.c 	if (last_offset == MT_FREQ_OFFSET_INVALID)
last_offset      1045 drivers/net/wireless/mediatek/mt7601u/phy.c 	if (abs(last_offset) >= activate_threshold)
last_offset      1047 drivers/net/wireless/mediatek/mt7601u/phy.c 	else if (abs(last_offset) <= deactivate_threshold)
last_offset      1053 drivers/net/wireless/mediatek/mt7601u/phy.c 	if (last_offset > deactivate_threshold) {
last_offset      1058 drivers/net/wireless/mediatek/mt7601u/phy.c 	} else if (last_offset < -deactivate_threshold) {
last_offset      1077 drivers/net/wireless/mediatek/mt7601u/phy.c 	s8 last_offset;
last_offset      1082 drivers/net/wireless/mediatek/mt7601u/phy.c 	last_offset = dev->bcn_freq_off;
last_offset      1086 drivers/net/wireless/mediatek/mt7601u/phy.c 	delay = __mt7601u_phy_freq_cal(dev, last_offset, phy_mode);
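
The mt7601u calibration above applies hysteresis to the last measured beacon frequency offset: trimming starts only once abs(last_offset) reaches an activation threshold, stops again only after it has fallen to a lower deactivation threshold, and the sign of the offset selects the trim direction. A hedged sketch of that hysteresis logic follows; the threshold values and the printed actions are illustrative, not the driver's real register writes.

    #include <stdbool.h>
    #include <stdlib.h>
    #include <stdio.h>

    #define ACTIVATE_THRESHOLD      25      /* illustrative */
    #define DEACTIVATE_THRESHOLD    10      /* illustrative */

    struct freq_cal {
        bool adjusting;
    };

    /* One calibration round driven by the last measured frequency offset. */
    static void freq_cal_step(struct freq_cal *cal, int last_offset)
    {
        if (abs(last_offset) >= ACTIVATE_THRESHOLD)
            cal->adjusting = true;
        else if (abs(last_offset) <= DEACTIVATE_THRESHOLD)
            cal->adjusting = false;

        if (!cal->adjusting)
            return;

        if (last_offset > DEACTIVATE_THRESHOLD)
            printf("offset %d: trim one step in one direction\n", last_offset);
        else if (last_offset < -DEACTIVATE_THRESHOLD)
            printf("offset %d: trim one step the other way\n", last_offset);
    }

    int main(void)
    {
        struct freq_cal cal = { .adjusting = false };
        int samples[] = { 5, 30, 15, 8, 12 };

        for (unsigned int i = 0; i < sizeof(samples) / sizeof(samples[0]); i++)
            freq_cal_step(&cal, samples[i]);
        return 0;
    }
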
last_offset       770 drivers/soc/ti/knav_qmss_queue.c 	unsigned last_offset;
last_offset       825 drivers/soc/ti/knav_qmss_queue.c 	last_offset = 0;
last_offset       829 drivers/soc/ti/knav_qmss_queue.c 		if ((pi->region_offset - last_offset) >= num_desc) {
last_offset       833 drivers/soc/ti/knav_qmss_queue.c 		last_offset = pi->region_offset + pi->num_desc;
last_offset       840 drivers/soc/ti/knav_qmss_queue.c 		pool->region_offset = last_offset;
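
The knav_qmss code above walks the already-allocated regions in order, keeping last_offset at the end of the previous region; the first gap between last_offset and the next region's start that is large enough receives the new pool. A minimal first-fit sketch of that search; a sorted array stands in for the driver's linked list.

    #include <stdio.h>

    struct region {
        unsigned int offset;    /* start of an existing allocation */
        unsigned int num_desc;  /* its length */
    };

    /* Regions must be sorted by offset. Returns the start of the first gap
     * that can hold num_desc descriptors, or region_size if none fits. */
    static unsigned int find_gap(const struct region *r, unsigned int n,
                                 unsigned int num_desc, unsigned int region_size)
    {
        unsigned int last_offset = 0;

        for (unsigned int i = 0; i < n; i++) {
            if (r[i].offset - last_offset >= num_desc)
                return last_offset;
            last_offset = r[i].offset + r[i].num_desc;
        }
        return last_offset + num_desc <= region_size ? last_offset : region_size;
    }

    int main(void)
    {
        struct region used[] = { { 0, 32 }, { 64, 16 } };

        /* The 32-descriptor gap at offset 32 is the first one big enough. */
        printf("%u\n", find_gap(used, 2, 24, 128));
        return 0;
    }
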
last_offset       168 drivers/staging/media/meson/vdec/esparser.c 	if (offset < sess->last_offset)
last_offset       171 drivers/staging/media/meson/vdec/esparser.c 	sess->last_offset = offset;
last_offset       314 drivers/staging/media/meson/vdec/vdec.c 	sess->last_offset = 0;
last_offset       257 drivers/staging/media/meson/vdec/vdec.h 	u32 last_offset;
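
In the meson vdec lines above, the session remembers the last ES parser offset it saw and resets it to 0 when a session starts; a new offset smaller than the previous one means the parser's write position wrapped around the buffer. A hedged sketch of that wrap detection; the wrap counter is an illustrative way to consume the event, not necessarily what the driver does with it.

    #include <stdint.h>
    #include <stdio.h>

    struct session {
        uint32_t last_offset;
        uint32_t wraps;         /* illustrative: count buffer wrap-arounds */
    };

    static void track_offset(struct session *sess, uint32_t offset)
    {
        if (offset < sess->last_offset)
            sess->wraps++;      /* offset went backwards: buffer wrapped */

        sess->last_offset = offset;
    }

    int main(void)
    {
        struct session sess = { .last_offset = 0, .wraps = 0 };
        uint32_t offsets[] = { 0x1000, 0x8000, 0xf000, 0x0800 };

        for (unsigned int i = 0; i < 4; i++)
            track_offset(&sess, offsets[i]);
        printf("wraps: %u\n", sess.wraps);  /* prints 1 */
        return 0;
    }
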
last_offset       434 fs/btrfs/compression.c 	u64 last_offset;
last_offset       446 fs/btrfs/compression.c 	last_offset = bio_end_offset(cb->orig_bio);
last_offset       455 fs/btrfs/compression.c 	while (last_offset < compressed_end) {
last_offset       456 fs/btrfs/compression.c 		pg_index = last_offset >> PAGE_SHIFT;
last_offset       479 fs/btrfs/compression.c 		end = last_offset + PAGE_SIZE - 1;
last_offset       486 fs/btrfs/compression.c 		lock_extent(tree, last_offset, end);
last_offset       488 fs/btrfs/compression.c 		em = lookup_extent_mapping(em_tree, last_offset,
last_offset       492 fs/btrfs/compression.c 		if (!em || last_offset < em->start ||
last_offset       493 fs/btrfs/compression.c 		    (last_offset + PAGE_SIZE > extent_map_end(em)) ||
last_offset       496 fs/btrfs/compression.c 			unlock_extent(tree, last_offset, end);
last_offset       524 fs/btrfs/compression.c 			unlock_extent(tree, last_offset, end);
last_offset       530 fs/btrfs/compression.c 		last_offset += PAGE_SIZE;
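
The btrfs readahead above starts last_offset at the end of the original bio and walks forward one page at a time until the end of the compressed extent, stopping early if a page is no longer covered by the same extent mapping; each iteration converts the byte offset to a page index and advances by PAGE_SIZE. A stripped-down sketch of that page-by-page walk; page_in_same_extent() stands in for looking up and validating the extent map.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT  12
    #define PAGE_SIZE   (1UL << PAGE_SHIFT)

    /* Illustrative stand-in for the extent-map lookup and checks. */
    static bool page_in_same_extent(uint64_t page_end)
    {
        return page_end < 6 * PAGE_SIZE;    /* pretend the extent ends there */
    }

    static void walk_pages(uint64_t last_offset, uint64_t compressed_end)
    {
        while (last_offset < compressed_end) {
            uint64_t pg_index = last_offset >> PAGE_SHIFT;
            uint64_t end = last_offset + PAGE_SIZE - 1;

            if (!page_in_same_extent(end))
                break;  /* next page belongs to a different extent */

            printf("add page %llu to the bio\n", (unsigned long long)pg_index);
            last_offset += PAGE_SIZE;
        }
    }

    int main(void)
    {
        walk_pages(2 * PAGE_SIZE, 8 * PAGE_SIZE);   /* illustrative range */
        return 0;
    }
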
last_offset        51 fs/btrfs/file.c 	u64 last_offset;
last_offset       107 fs/btrfs/file.c 			if (defrag->last_offset > entry->last_offset)
last_offset       108 fs/btrfs/file.c 				entry->last_offset = defrag->last_offset;
last_offset       310 fs/btrfs/file.c 	range.start = defrag->last_offset;
last_offset       322 fs/btrfs/file.c 		defrag->last_offset = range.start;
last_offset       324 fs/btrfs/file.c 	} else if (defrag->last_offset && !defrag->cycled) {
last_offset       330 fs/btrfs/file.c 		defrag->last_offset = 0;
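
The auto-defrag lines above make the work resumable: each pass records where it stopped in defrag->last_offset so a requeued entry continues from there (and when two pending entries for the same inode are merged, the larger last_offset wins); a pass that reaches the end of the file but did not start at offset 0 is "cycled" once more from the beginning. A hedged sketch of that resume/cycle bookkeeping; the field and function names, sizes, and the 1 MiB per-pass progress are illustrative.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    struct defrag_work {
        uint64_t last_offset;   /* where the previous pass stopped */
        bool cycled;            /* already wrapped back to the start once */
    };

    /* Illustrative pass: works from *start, reports where it stopped and
     * whether it reached the end of the file. */
    static bool run_pass(uint64_t *start, uint64_t file_size)
    {
        uint64_t stop = *start + 1024 * 1024;   /* pretend we did 1 MiB */

        if (stop >= file_size)
            return true;
        *start = stop;
        return false;
    }

    static void process(struct defrag_work *w, uint64_t file_size)
    {
        for (;;) {
            uint64_t start = w->last_offset;

            if (!run_pass(&start, file_size)) {
                /* More to do: remember the resume point and requeue. */
                w->last_offset = start;
                continue;   /* stands in for re-queueing the work item */
            }
            if (w->last_offset && !w->cycled) {
                /* Reached the end but started mid-file: go around once
                 * more from offset 0 to cover the skipped range. */
                w->cycled = true;
                w->last_offset = 0;
                continue;
            }
            break;          /* fully done */
        }
    }

    int main(void)
    {
        struct defrag_work w = { .last_offset = 3 * 1024 * 1024, .cycled = false };

        process(&w, 8 * 1024 * 1024);
        printf("done, cycled=%d\n", w.cycled);
        return 0;
    }
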
last_offset      8323 fs/btrfs/inode.c 	u64 last_offset;
last_offset      8331 fs/btrfs/inode.c 		last_offset = ordered_offset;
last_offset      8344 fs/btrfs/inode.c 		if (ordered_offset == last_offset)
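
The btrfs write-completion loop above saves the offset before asking for the next ordered range and bails out if the offset did not move, a simple no-progress guard that keeps the loop from spinning forever. A tiny sketch of that guard; the advance() helper and the sector-sized step are illustrative.

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative: tries to complete more of the range, possibly moving *off. */
    static void advance(uint64_t *off, uint64_t end)
    {
        if (*off + 512 <= end)
            *off += 512;    /* pretend one sector completed */
    }

    int main(void)
    {
        uint64_t ordered_offset = 0, end = 2048;

        for (;;) {
            uint64_t last_offset = ordered_offset;

            advance(&ordered_offset, end);
            if (ordered_offset == last_offset)
                break;      /* no progress was made: stop looping */
        }
        printf("stopped at %llu\n", (unsigned long long)ordered_offset);
        return 0;
    }
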
last_offset      2670 fs/btrfs/scrub.c 	u64 last_offset;
last_offset      2675 fs/btrfs/scrub.c 	last_offset = (physical - map->stripes[num].physical) * data_stripes;
last_offset      2677 fs/btrfs/scrub.c 		*stripe_start = last_offset;
last_offset      2679 fs/btrfs/scrub.c 	*offset = last_offset;
last_offset      2681 fs/btrfs/scrub.c 		*offset = last_offset + i * map->stripe_len;
last_offset      2696 fs/btrfs/scrub.c 	*offset = last_offset + j * map->stripe_len;
last_offset      3537 fs/btrfs/tree-log.c 				       u64 first_offset, u64 last_offset)
last_offset      3555 fs/btrfs/tree-log.c 	btrfs_set_dir_log_end(path->nodes[0], item, last_offset);
last_offset      3581 fs/btrfs/tree-log.c 	u64 last_offset = (u64)-1;
last_offset      3715 fs/btrfs/tree-log.c 				last_offset = (u64)-1;
last_offset      3722 fs/btrfs/tree-log.c 			last_offset = (u64)-1;
last_offset      3732 fs/btrfs/tree-log.c 				last_offset = tmp.offset;
last_offset      3741 fs/btrfs/tree-log.c 		*last_offset_ret = last_offset;
last_offset      3747 fs/btrfs/tree-log.c 					 ino, first_offset, last_offset);
last_offset      2667 fs/f2fs/node.c 	int i, idx, last_offset, nrpages;
last_offset      2670 fs/f2fs/node.c 	last_offset = sbi->blocks_per_seg;
last_offset      2674 fs/f2fs/node.c 	for (i = 0; i < last_offset; i += nrpages, addr += nrpages) {
last_offset      2675 fs/f2fs/node.c 		nrpages = min(last_offset - i, BIO_MAX_PAGES);
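
The f2fs loop above covers one full segment (last_offset = blocks_per_seg) in chunks, issuing at most BIO_MAX_PAGES pages per round and clamping the final chunk with min(). A minimal sketch of that chunked iteration, with illustrative constants:

    #include <stdio.h>

    #define BLOCKS_PER_SEG  512     /* illustrative segment size in blocks */
    #define MAX_PAGES       256     /* illustrative per-bio page limit */

    #define MIN(a, b)       ((a) < (b) ? (a) : (b))

    int main(void)
    {
        int last_offset = BLOCKS_PER_SEG;
        int nrpages;

        for (int i = 0; i < last_offset; i += nrpages) {
            /* Never submit more than MAX_PAGES, and never read past the
             * end of the segment. */
            nrpages = MIN(last_offset - i, MAX_PAGES);
            printf("read blocks [%d, %d)\n", i, i + nrpages);
        }
        return 0;
    }
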
last_offset      2568 fs/reiserfs/inode.c 		unsigned last_offset;
last_offset      2570 fs/reiserfs/inode.c 		last_offset = inode->i_size & (PAGE_SIZE - 1);
last_offset      2572 fs/reiserfs/inode.c 		if (page->index >= end_index + 1 || !last_offset) {
last_offset      2576 fs/reiserfs/inode.c 		zero_user_segment(page, last_offset, PAGE_SIZE);
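
The reiserfs writepage path above computes last_offset as i_size modulo the page size; on the page containing EOF, everything from last_offset to the end of the page is zeroed so stale data past EOF never reaches disk (a last_offset of 0 means EOF is page-aligned, so the whole page is valid). A userspace sketch of the same tail-zeroing arithmetic, with memset standing in for zero_user_segment:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define PAGE_SIZE   4096u

    int main(void)
    {
        unsigned char page[PAGE_SIZE];
        uint64_t i_size = 10000;    /* illustrative file size */
        unsigned int last_offset = i_size & (PAGE_SIZE - 1);

        memset(page, 0xaa, sizeof(page));   /* pretend the page holds data */

        /* Zero the part of the EOF page that lies beyond the file size. */
        if (last_offset)
            memset(page + last_offset, 0, PAGE_SIZE - last_offset);

        printf("zeroed bytes [%u, %u)\n", last_offset, PAGE_SIZE);
        return 0;
    }
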
last_offset      2062 kernel/bpf/btf.c 	u32 meta_needed, last_offset;
last_offset      2085 kernel/bpf/btf.c 	last_offset = 0;
last_offset      2118 kernel/bpf/btf.c 		if (last_offset > offset) {
last_offset      2131 kernel/bpf/btf.c 		last_offset = offset;
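
The BTF verifier lines above keep last_offset at the offset of the previously checked member and reject the type if a later member's offset goes backwards, i.e. members must appear in non-decreasing offset order. A small sketch of that ordering check; the offsets array and the error value are illustrative.

    #include <errno.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Returns 0 if member offsets never decrease, -EINVAL otherwise. */
    static int check_member_order(const uint32_t *offsets, unsigned int n)
    {
        uint32_t last_offset = 0;

        for (unsigned int i = 0; i < n; i++) {
            if (last_offset > offsets[i])
                return -EINVAL; /* members must be sorted by offset */
            last_offset = offsets[i];
        }
        return 0;
    }

    int main(void)
    {
        uint32_t ok[]  = { 0, 32, 64 };
        uint32_t bad[] = { 0, 64, 32 };

        printf("%d %d\n", check_member_order(ok, 3), check_member_order(bad, 3));
        return 0;
    }
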