ih               2305 arch/powerpc/kernel/prom_init.c static int __init prom_set_color(ihandle ih, int i, int r, int g, int b)
ih               2307 arch/powerpc/kernel/prom_init.c 	return call_prom("call-method", 6, 1, ADDR("color!"), ih, i, b, g, r);
ih               2322 arch/powerpc/kernel/prom_init.c 	ihandle ih;
ih               2365 arch/powerpc/kernel/prom_init.c 		ih = call_prom("open", 1, 1, path);
ih               2366 arch/powerpc/kernel/prom_init.c 		if (ih == 0) {
ih               2379 arch/powerpc/kernel/prom_init.c 			if (prom_set_color(ih, i, clut[0], clut[1],
ih               2386 arch/powerpc/kernel/prom_init.c 			if (prom_set_color(ih, i + 32, clut[0], clut[1],
ih                103 crypto/vmac.c  #define ADD128(rh, rl, ih, il)						\
ih                109 crypto/vmac.c  		(rh) += (ih);						\
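The two vmac.c lines above belong to a 128-bit accumulator macro: ADD128(rh, rl, ih, il) adds the two-word value ih:il into rh:rl. A minimal sketch of that add-with-carry pattern follows; the wrap-around carry test is the standard portable technique and is an assumption here, since only the final "(rh) += (ih)" step is visible in the listing.

#include <stdint.h>

/* (rh:rl) += (ih:il), detecting carry out of the low word by checking
 * whether the addition wrapped around. */
static inline void add128(uint64_t *rh, uint64_t *rl, uint64_t ih, uint64_t il)
{
	*rl += il;
	if (*rl < il)		/* low word wrapped: carry into the high word */
		(*rh)++;
	*rh += ih;		/* the "(rh) += (ih)" step from the macro */
}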
ih                484 drivers/cpufreq/pmac64-cpufreq.c 	u64 max_freq, min_freq, ih, il;
ih                580 drivers/cpufreq/pmac64-cpufreq.c 	ih = *((u32 *)(eeprom + 0x10));
ih                584 drivers/cpufreq/pmac64-cpufreq.c 	if (il == ih) {
ih                591 drivers/cpufreq/pmac64-cpufreq.c 	if (ih != 0 && il != 0)
ih                592 drivers/cpufreq/pmac64-cpufreq.c 		min_freq = (max_freq * il) / ih;
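In pmac64-cpufreq above, ih is read as a 32-bit word from the EEPROM image and, together with il, forms the ratio used to derive min_freq from max_freq; the ratio is only applied when both words are non-zero. A hedged sketch of that arithmetic (what the driver does in its il == ih branch is not shown in the listing, so this sketch simply reports "no second speed" in that case):

#include <stdint.h>

static uint64_t derive_min_freq(uint64_t max_freq, uint32_t il, uint32_t ih)
{
	if (il == ih || ih == 0 || il == 0)
		return 0;			/* no usable low-speed ratio */
	return (max_freq * il) / ih;		/* mirrors "min_freq = (max_freq * il) / ih" */
}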
ih                396 drivers/crypto/cavium/nitrox/nitrox_req.h 	union pkt_instr_hdr ih;
ih                445 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	sr->instr.ih.value = 0;
ih                446 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	sr->instr.ih.s.g = 1;
ih                447 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	sr->instr.ih.s.gsz = sr->in.sgmap_cnt;
ih                448 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	sr->instr.ih.s.ssz = sr->out.sgmap_cnt;
ih                449 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	sr->instr.ih.s.fsz = FDATA_SIZE + sizeof(struct gphdr);
ih                450 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	sr->instr.ih.s.tlen = sr->instr.ih.s.fsz + sr->in.total_bytes;
ih                451 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	sr->instr.ih.value = cpu_to_be64(sr->instr.ih.value);
ih               2624 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c 	atomic_set(&adev->irq.ih.lock, 0);
ih                 56 drivers/gpu/drm/amd/amdgpu/amdgpu_doorbell.h 	uint32_t ih;
ih                 41 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c int amdgpu_ih_ring_init(struct amdgpu_device *adev, struct amdgpu_ih_ring *ih,
ih                 50 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 	ih->ring_size = ring_size;
ih                 51 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 	ih->ptr_mask = ih->ring_size - 1;
ih                 52 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 	ih->rptr = 0;
ih                 53 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 	ih->use_bus_addr = use_bus_addr;
ih                 58 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		if (ih->ring)
ih                 64 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		ih->ring = dma_alloc_coherent(adev->dev, ih->ring_size + 8,
ih                 66 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		if (ih->ring == NULL)
ih                 69 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		memset((void *)ih->ring, 0, ih->ring_size + 8);
ih                 70 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		ih->gpu_addr = dma_addr;
ih                 71 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		ih->wptr_addr = dma_addr + ih->ring_size;
ih                 72 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		ih->wptr_cpu = &ih->ring[ih->ring_size / 4];
ih                 73 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		ih->rptr_addr = dma_addr + ih->ring_size + 4;
ih                 74 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		ih->rptr_cpu = &ih->ring[(ih->ring_size / 4) + 1];
ih                 88 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		r = amdgpu_bo_create_kernel(adev, ih->ring_size, PAGE_SIZE,
ih                 90 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 					    &ih->ring_obj, &ih->gpu_addr,
ih                 91 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 					    (void **)&ih->ring);
ih                 98 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		ih->wptr_addr = adev->wb.gpu_addr + wptr_offs * 4;
ih                 99 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		ih->wptr_cpu = &adev->wb.wb[wptr_offs];
ih                100 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		ih->rptr_addr = adev->wb.gpu_addr + rptr_offs * 4;
ih                101 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		ih->rptr_cpu = &adev->wb.wb[rptr_offs];
ih                115 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c void amdgpu_ih_ring_fini(struct amdgpu_device *adev, struct amdgpu_ih_ring *ih)
ih                117 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 	if (ih->use_bus_addr) {
ih                118 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		if (!ih->ring)
ih                124 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		dma_free_coherent(adev->dev, ih->ring_size + 8,
ih                125 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 				  (void *)ih->ring, ih->gpu_addr);
ih                126 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		ih->ring = NULL;
ih                128 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		amdgpu_bo_free_kernel(&ih->ring_obj, &ih->gpu_addr,
ih                129 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 				      (void **)&ih->ring);
ih                130 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		amdgpu_device_wb_free(adev, (ih->wptr_addr - ih->gpu_addr) / 4);
ih                131 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		amdgpu_device_wb_free(adev, (ih->rptr_addr - ih->gpu_addr) / 4);
ih                144 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c int amdgpu_ih_process(struct amdgpu_device *adev, struct amdgpu_ih_ring *ih)
ih                149 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 	if (!ih->enabled || adev->shutdown)
ih                152 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 	wptr = amdgpu_ih_get_wptr(adev, ih);
ih                156 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 	if (atomic_xchg(&ih->lock, 1))
ih                159 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 	DRM_DEBUG("%s: rptr %d, wptr %d\n", __func__, ih->rptr, wptr);
ih                164 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 	while (ih->rptr != wptr && --count) {
ih                165 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		amdgpu_irq_dispatch(adev, ih);
ih                166 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 		ih->rptr &= ih->ptr_mask;
ih                169 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 	amdgpu_ih_set_rptr(adev, ih);
ih                170 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 	atomic_set(&ih->lock, 0);
ih                173 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 	wptr = amdgpu_ih_get_wptr(adev, ih);
ih                174 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c 	if (wptr != ih->rptr)
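The amdgpu_ih.c lines above outline the generic IH processing loop: read the hardware write pointer, take the ring lock with an atomic exchange, drain entries until the read pointer catches up (bounded by a budget), publish the read pointer, then re-check for work that arrived in the meantime. Below is a self-contained sketch of that control flow with stand-in types and a trivial "dispatch" that just advances rptr; the real driver calls amdgpu_irq_dispatch() and the per-ASIC get_wptr/set_rptr hooks instead.

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

struct ih_ring {
	atomic_int lock;
	bool enabled;
	uint32_t rptr;
	uint32_t ptr_mask;		/* ring_size - 1, ring_size a power of two */
	volatile uint32_t *wptr_cpu;	/* write-back slot updated by the hardware */
};

enum { IH_DONE = 0, IH_MORE_WORK = 1 };

static int ih_process(struct ih_ring *ih)
{
	unsigned int budget = 32;	/* arbitrary cap, like the driver's count */
	uint32_t wptr;

	if (!ih->enabled)
		return IH_DONE;

	wptr = *ih->wptr_cpu & ih->ptr_mask;	/* stand-in for amdgpu_ih_get_wptr() */

	if (atomic_exchange(&ih->lock, 1))	/* someone else is already draining */
		return IH_DONE;

	while (ih->rptr != wptr && --budget) {
		ih->rptr += 16;			/* "dispatch" one 16-byte entry */
		ih->rptr &= ih->ptr_mask;
	}

	/* a real driver would write ih->rptr back to the hardware here */
	atomic_store(&ih->lock, 0);

	/* more entries may have landed while the lock was held */
	wptr = *ih->wptr_cpu & ih->ptr_mask;
	if (wptr != ih->rptr)
		return IH_MORE_WORK;		/* caller should run ih_process() again */

	return IH_DONE;
}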
ih                 61 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.h 	u32 (*get_wptr)(struct amdgpu_device *adev, struct amdgpu_ih_ring *ih);
ih                 62 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.h 	void (*decode_iv)(struct amdgpu_device *adev, struct amdgpu_ih_ring *ih,
ih                 64 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.h 	void (*set_rptr)(struct amdgpu_device *adev, struct amdgpu_ih_ring *ih);
ih                 67 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.h #define amdgpu_ih_get_wptr(adev, ih) (adev)->irq.ih_funcs->get_wptr((adev), (ih))
ih                 69 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.h 	(adev)->irq.ih_funcs->decode_iv((adev), (ih), (iv))
ih                 70 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.h #define amdgpu_ih_set_rptr(adev, ih) (adev)->irq.ih_funcs->set_rptr((adev), (ih))
ih                 72 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.h int amdgpu_ih_ring_init(struct amdgpu_device *adev, struct amdgpu_ih_ring *ih,
ih                 74 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.h void amdgpu_ih_ring_fini(struct amdgpu_device *adev, struct amdgpu_ih_ring *ih);
ih                 75 drivers/gpu/drm/amd/amdgpu/amdgpu_ih.h int amdgpu_ih_process(struct amdgpu_device *adev, struct amdgpu_ih_ring *ih);
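The amdgpu_ih.h lines above show how the generic IH code stays ASIC-agnostic: each chip provides its own get_wptr/decode_iv/set_rptr through adev->irq.ih_funcs, and the amdgpu_ih_* macros simply call through that table. A minimal sketch of the ops-table indirection, with placeholder struct names:

#include <stdint.h>

struct dev;			/* placeholder for struct amdgpu_device */
struct ring;			/* placeholder for struct amdgpu_ih_ring */

struct ih_funcs {
	uint32_t (*get_wptr)(struct dev *d, struct ring *r);
	void (*set_rptr)(struct dev *d, struct ring *r);
};

/* roughly what the amdgpu_ih_get_wptr()/amdgpu_ih_set_rptr() macros expand to */
static uint32_t ih_get_wptr(struct dev *d, struct ring *r, const struct ih_funcs *f)
{
	return f->get_wptr(d, r);
}

static void ih_set_rptr(struct dev *d, struct ring *r, const struct ih_funcs *f)
{
	f->set_rptr(d, r);
}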
ih                153 drivers/gpu/drm/amd/amdgpu/amdgpu_irq.c 	ret = amdgpu_ih_process(adev, &adev->irq.ih);
ih                377 drivers/gpu/drm/amd/amdgpu/amdgpu_irq.c 			 struct amdgpu_ih_ring *ih)
ih                379 drivers/gpu/drm/amd/amdgpu/amdgpu_irq.c 	u32 ring_index = ih->rptr >> 2;
ih                386 drivers/gpu/drm/amd/amdgpu/amdgpu_irq.c 	entry.iv_entry = (const uint32_t *)&ih->ring[ring_index];
ih                389 drivers/gpu/drm/amd/amdgpu/amdgpu_irq.c 	trace_amdgpu_iv(ih - &adev->irq.ih, &entry);
ih                 91 drivers/gpu/drm/amd/amdgpu/amdgpu_irq.h 	struct amdgpu_ih_ring		ih, ih1, ih2;
ih                111 drivers/gpu/drm/amd/amdgpu/amdgpu_irq.h 			 struct amdgpu_ih_ring *ih);
ih                 53 drivers/gpu/drm/amd/amdgpu/amdgpu_test.c 	if (adev->irq.ih.ring_obj)
ih                 54 drivers/gpu/drm/amd/amdgpu/amdgpu_test.c 		n -= adev->irq.ih.ring_size;
ih                 77 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 	    TP_PROTO(unsigned ih, struct amdgpu_iv_entry *iv),
ih                 78 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 	    TP_ARGS(ih, iv),
ih                 80 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			     __field(unsigned, ih)
ih                 92 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			   __entry->ih = ih;
ih                108 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 		      __entry->ih, __entry->client_id, __entry->src_id,
ih                 69 drivers/gpu/drm/amd/amdgpu/cik_ih.c 	adev->irq.ih.enabled = true;
ih                 91 drivers/gpu/drm/amd/amdgpu/cik_ih.c 	adev->irq.ih.enabled = false;
ih                 92 drivers/gpu/drm/amd/amdgpu/cik_ih.c 	adev->irq.ih.rptr = 0;
ih                108 drivers/gpu/drm/amd/amdgpu/cik_ih.c 	struct amdgpu_ih_ring *ih = &adev->irq.ih;
ih                126 drivers/gpu/drm/amd/amdgpu/cik_ih.c 	WREG32(mmIH_RB_BASE, adev->irq.ih.gpu_addr >> 8);
ih                127 drivers/gpu/drm/amd/amdgpu/cik_ih.c 	rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4);
ih                136 drivers/gpu/drm/amd/amdgpu/cik_ih.c 	WREG32(mmIH_RB_WPTR_ADDR_LO, lower_32_bits(ih->wptr_addr));
ih                137 drivers/gpu/drm/amd/amdgpu/cik_ih.c 	WREG32(mmIH_RB_WPTR_ADDR_HI, upper_32_bits(ih->wptr_addr) & 0xFF);
ih                188 drivers/gpu/drm/amd/amdgpu/cik_ih.c 			   struct amdgpu_ih_ring *ih)
ih                192 drivers/gpu/drm/amd/amdgpu/cik_ih.c 	wptr = le32_to_cpu(*ih->wptr_cpu);
ih                201 drivers/gpu/drm/amd/amdgpu/cik_ih.c 			 wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);
ih                202 drivers/gpu/drm/amd/amdgpu/cik_ih.c 		ih->rptr = (wptr + 16) & ih->ptr_mask;
ih                207 drivers/gpu/drm/amd/amdgpu/cik_ih.c 	return (wptr & ih->ptr_mask);
ih                242 drivers/gpu/drm/amd/amdgpu/cik_ih.c 			     struct amdgpu_ih_ring *ih,
ih                246 drivers/gpu/drm/amd/amdgpu/cik_ih.c 	u32 ring_index = ih->rptr >> 2;
ih                249 drivers/gpu/drm/amd/amdgpu/cik_ih.c 	dw[0] = le32_to_cpu(ih->ring[ring_index + 0]);
ih                250 drivers/gpu/drm/amd/amdgpu/cik_ih.c 	dw[1] = le32_to_cpu(ih->ring[ring_index + 1]);
ih                251 drivers/gpu/drm/amd/amdgpu/cik_ih.c 	dw[2] = le32_to_cpu(ih->ring[ring_index + 2]);
ih                252 drivers/gpu/drm/amd/amdgpu/cik_ih.c 	dw[3] = le32_to_cpu(ih->ring[ring_index + 3]);
ih                262 drivers/gpu/drm/amd/amdgpu/cik_ih.c 	ih->rptr += 16;
ih                273 drivers/gpu/drm/amd/amdgpu/cik_ih.c 			    struct amdgpu_ih_ring *ih)
ih                275 drivers/gpu/drm/amd/amdgpu/cik_ih.c 	WREG32(mmIH_RB_RPTR, ih->rptr);
ih                297 drivers/gpu/drm/amd/amdgpu/cik_ih.c 	r = amdgpu_ih_ring_init(adev, &adev->irq.ih, 64 * 1024, false);
ih                311 drivers/gpu/drm/amd/amdgpu/cik_ih.c 	amdgpu_ih_ring_fini(adev, &adev->irq.ih);
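The cik_ih.c lines above (and the cz/iceland/tonga/si variants that follow) decode one interrupt vector as four little-endian dwords and advance rptr by 16 bytes per entry. A self-contained sketch of that decode; the field masks mirror the radeon lines further down in this listing (src_id, src_data, ring_id), and the output struct is illustrative rather than the driver's amdgpu_iv_entry.

#include <stdint.h>

struct iv_entry {
	uint32_t src_id;
	uint32_t src_data;
	uint32_t ring_id;
};

static uint32_t le32(const uint8_t *p)	/* stand-in for le32_to_cpu() */
{
	return (uint32_t)p[0] | ((uint32_t)p[1] << 8) |
	       ((uint32_t)p[2] << 16) | ((uint32_t)p[3] << 24);
}

static void decode_iv_16(const uint8_t *ring, uint32_t *rptr,
			 struct iv_entry *entry)
{
	const uint8_t *e = ring + *rptr;

	entry->src_id   = le32(e + 0) & 0xff;
	entry->src_data = le32(e + 4) & 0xfffffff;
	entry->ring_id  = le32(e + 8) & 0xff;

	*rptr += 16;	/* matches "ih->rptr += 16" above */
}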
ih                 69 drivers/gpu/drm/amd/amdgpu/cz_ih.c 	adev->irq.ih.enabled = true;
ih                 91 drivers/gpu/drm/amd/amdgpu/cz_ih.c 	adev->irq.ih.enabled = false;
ih                 92 drivers/gpu/drm/amd/amdgpu/cz_ih.c 	adev->irq.ih.rptr = 0;
ih                108 drivers/gpu/drm/amd/amdgpu/cz_ih.c 	struct amdgpu_ih_ring *ih = &adev->irq.ih;
ih                127 drivers/gpu/drm/amd/amdgpu/cz_ih.c 	WREG32(mmIH_RB_BASE, adev->irq.ih.gpu_addr >> 8);
ih                129 drivers/gpu/drm/amd/amdgpu/cz_ih.c 	rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4);
ih                138 drivers/gpu/drm/amd/amdgpu/cz_ih.c 	WREG32(mmIH_RB_WPTR_ADDR_LO, lower_32_bits(ih->wptr_addr));
ih                139 drivers/gpu/drm/amd/amdgpu/cz_ih.c 	WREG32(mmIH_RB_WPTR_ADDR_HI, upper_32_bits(ih->wptr_addr) & 0xFF);
ih                190 drivers/gpu/drm/amd/amdgpu/cz_ih.c 			  struct amdgpu_ih_ring *ih)
ih                194 drivers/gpu/drm/amd/amdgpu/cz_ih.c 	wptr = le32_to_cpu(*ih->wptr_cpu);
ih                203 drivers/gpu/drm/amd/amdgpu/cz_ih.c 			wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);
ih                204 drivers/gpu/drm/amd/amdgpu/cz_ih.c 		ih->rptr = (wptr + 16) & ih->ptr_mask;
ih                209 drivers/gpu/drm/amd/amdgpu/cz_ih.c 	return (wptr & ih->ptr_mask);
ih                221 drivers/gpu/drm/amd/amdgpu/cz_ih.c 			    struct amdgpu_ih_ring *ih,
ih                225 drivers/gpu/drm/amd/amdgpu/cz_ih.c 	u32 ring_index = ih->rptr >> 2;
ih                228 drivers/gpu/drm/amd/amdgpu/cz_ih.c 	dw[0] = le32_to_cpu(ih->ring[ring_index + 0]);
ih                229 drivers/gpu/drm/amd/amdgpu/cz_ih.c 	dw[1] = le32_to_cpu(ih->ring[ring_index + 1]);
ih                230 drivers/gpu/drm/amd/amdgpu/cz_ih.c 	dw[2] = le32_to_cpu(ih->ring[ring_index + 2]);
ih                231 drivers/gpu/drm/amd/amdgpu/cz_ih.c 	dw[3] = le32_to_cpu(ih->ring[ring_index + 3]);
ih                241 drivers/gpu/drm/amd/amdgpu/cz_ih.c 	ih->rptr += 16;
ih                252 drivers/gpu/drm/amd/amdgpu/cz_ih.c 			   struct amdgpu_ih_ring *ih)
ih                254 drivers/gpu/drm/amd/amdgpu/cz_ih.c 	WREG32(mmIH_RB_RPTR, ih->rptr);
ih                276 drivers/gpu/drm/amd/amdgpu/cz_ih.c 	r = amdgpu_ih_ring_init(adev, &adev->irq.ih, 64 * 1024, false);
ih                290 drivers/gpu/drm/amd/amdgpu/cz_ih.c 	amdgpu_ih_ring_fini(adev, &adev->irq.ih);
ih                 69 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 	adev->irq.ih.enabled = true;
ih                 91 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 	adev->irq.ih.enabled = false;
ih                 92 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 	adev->irq.ih.rptr = 0;
ih                108 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 	struct amdgpu_ih_ring *ih = &adev->irq.ih;
ih                127 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 	WREG32(mmIH_RB_BASE, adev->irq.ih.gpu_addr >> 8);
ih                129 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 	rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4);
ih                138 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 	WREG32(mmIH_RB_WPTR_ADDR_LO, lower_32_bits(ih->wptr_addr));
ih                139 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 	WREG32(mmIH_RB_WPTR_ADDR_HI, upper_32_bits(ih->wptr_addr) & 0xFF);
ih                190 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 			       struct amdgpu_ih_ring *ih)
ih                194 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 	wptr = le32_to_cpu(*ih->wptr_cpu);
ih                203 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 			 wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);
ih                204 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 		ih->rptr = (wptr + 16) & ih->ptr_mask;
ih                209 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 	return (wptr & ih->ptr_mask);
ih                221 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 				 struct amdgpu_ih_ring *ih,
ih                225 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 	u32 ring_index = ih->rptr >> 2;
ih                228 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 	dw[0] = le32_to_cpu(ih->ring[ring_index + 0]);
ih                229 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 	dw[1] = le32_to_cpu(ih->ring[ring_index + 1]);
ih                230 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 	dw[2] = le32_to_cpu(ih->ring[ring_index + 2]);
ih                231 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 	dw[3] = le32_to_cpu(ih->ring[ring_index + 3]);
ih                241 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 	ih->rptr += 16;
ih                252 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 				struct amdgpu_ih_ring *ih)
ih                254 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 	WREG32(mmIH_RB_RPTR, ih->rptr);
ih                276 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 	r = amdgpu_ih_ring_init(adev, &adev->irq.ih, 64 * 1024, false);
ih                290 drivers/gpu/drm/amd/amdgpu/iceland_ih.c 	amdgpu_ih_ring_fini(adev, &adev->irq.ih);
ih                 52 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	adev->irq.ih.enabled = true;
ih                 72 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	adev->irq.ih.enabled = false;
ih                 73 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	adev->irq.ih.rptr = 0;
ih                 76 drivers/gpu/drm/amd/amdgpu/navi10_ih.c static uint32_t navi10_ih_rb_cntl(struct amdgpu_ih_ring *ih, uint32_t ih_rb_cntl)
ih                 78 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	int rb_bufsz = order_base_2(ih->ring_size / 4);
ih                 81 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 				   MC_SPACE, ih->use_bus_addr ? 1 : 4);
ih                112 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	struct amdgpu_ih_ring *ih = &adev->irq.ih;
ih                123 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	WREG32_SOC15(OSSSYS, 0, mmIH_RB_BASE, ih->gpu_addr >> 8);
ih                124 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	WREG32_SOC15(OSSSYS, 0, mmIH_RB_BASE_HI, (ih->gpu_addr >> 40) & 0xff);
ih                127 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	ih_rb_cntl = navi10_ih_rb_cntl(ih, ih_rb_cntl);
ih                132 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 		if (ih->use_bus_addr) {
ih                144 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 		     lower_32_bits(ih->wptr_addr));
ih                146 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 		     upper_32_bits(ih->wptr_addr) & 0xFFFF);
ih                153 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	if (ih->use_doorbell) {
ih                156 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 						 ih->doorbell_index);
ih                165 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	adev->nbio_funcs->ih_doorbell_range(adev, ih->use_doorbell,
ih                166 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 					    ih->doorbell_index);
ih                211 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 			      struct amdgpu_ih_ring *ih)
ih                215 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	wptr = le32_to_cpu(*ih->wptr_cpu);
ih                230 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	tmp = (wptr + 32) & ih->ptr_mask;
ih                233 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 		 wptr, ih->rptr, tmp);
ih                234 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	ih->rptr = tmp;
ih                241 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	return (wptr & ih->ptr_mask);
ih                253 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 				struct amdgpu_ih_ring *ih,
ih                257 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	u32 ring_index = ih->rptr >> 2;
ih                260 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	dw[0] = le32_to_cpu(ih->ring[ring_index + 0]);
ih                261 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	dw[1] = le32_to_cpu(ih->ring[ring_index + 1]);
ih                262 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	dw[2] = le32_to_cpu(ih->ring[ring_index + 2]);
ih                263 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	dw[3] = le32_to_cpu(ih->ring[ring_index + 3]);
ih                264 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	dw[4] = le32_to_cpu(ih->ring[ring_index + 4]);
ih                265 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	dw[5] = le32_to_cpu(ih->ring[ring_index + 5]);
ih                266 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	dw[6] = le32_to_cpu(ih->ring[ring_index + 6]);
ih                267 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	dw[7] = le32_to_cpu(ih->ring[ring_index + 7]);
ih                284 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	ih->rptr += 32;
ih                295 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 			       struct amdgpu_ih_ring *ih)
ih                297 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	if (ih->use_doorbell) {
ih                299 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 		*ih->rptr_cpu = ih->rptr;
ih                300 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 		WDOORBELL32(ih->doorbell_index, ih->rptr);
ih                302 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 		WREG32_SOC15(OSSSYS, 0, mmIH_RB_RPTR, ih->rptr);
ih                324 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	r = amdgpu_ih_ring_init(adev, &adev->irq.ih, 256 * 1024, use_bus_addr);
ih                328 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	adev->irq.ih.use_doorbell = true;
ih                329 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	adev->irq.ih.doorbell_index = adev->doorbell_index.ih << 1;
ih                341 drivers/gpu/drm/amd/amdgpu/navi10_ih.c 	amdgpu_ih_ring_fini(adev, &adev->irq.ih);
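navi10_ih_set_rptr above (like the tonga and vega10 versions later in the listing) updates the read pointer either through a doorbell, after mirroring rptr into the CPU-visible write-back slot, or with a plain register write when no doorbell is in use. A sketch of that branch, with empty stand-ins for the WDOORBELL32()/WREG32 accessors:

#include <stdint.h>

struct ih_rb {
	int use_doorbell;
	uint32_t doorbell_index;
	uint32_t rptr;
	volatile uint32_t *rptr_cpu;	/* CPU-visible write-back slot */
};

/* stand-ins for the register accessors used in the listing */
static void wdoorbell32(uint32_t index, uint32_t val) { (void)index; (void)val; }
static void wreg_ih_rb_rptr(uint32_t val) { (void)val; }

static void ih_update_rptr(struct ih_rb *ih)
{
	if (ih->use_doorbell) {
		*ih->rptr_cpu = ih->rptr;	/* mirror first, then ring the doorbell */
		wdoorbell32(ih->doorbell_index, ih->rptr);
	} else {
		wreg_ih_rb_rptr(ih->rptr);	/* direct IH_RB_RPTR write */
	}
}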
ih                552 drivers/gpu/drm/amd/amdgpu/nv.c 	adev->doorbell_index.ih = AMDGPU_NAVI10_DOORBELL_IH;
ih                 42 drivers/gpu/drm/amd/amdgpu/si_ih.c 	adev->irq.ih.enabled = true;
ih                 56 drivers/gpu/drm/amd/amdgpu/si_ih.c 	adev->irq.ih.enabled = false;
ih                 57 drivers/gpu/drm/amd/amdgpu/si_ih.c 	adev->irq.ih.rptr = 0;
ih                 62 drivers/gpu/drm/amd/amdgpu/si_ih.c 	struct amdgpu_ih_ring *ih = &adev->irq.ih;
ih                 74 drivers/gpu/drm/amd/amdgpu/si_ih.c 	WREG32(IH_RB_BASE, adev->irq.ih.gpu_addr >> 8);
ih                 75 drivers/gpu/drm/amd/amdgpu/si_ih.c 	rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4);
ih                 82 drivers/gpu/drm/amd/amdgpu/si_ih.c 	WREG32(IH_RB_WPTR_ADDR_LO, lower_32_bits(ih->wptr_addr));
ih                 83 drivers/gpu/drm/amd/amdgpu/si_ih.c 	WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(ih->wptr_addr) & 0xFF);
ih                106 drivers/gpu/drm/amd/amdgpu/si_ih.c 			  struct amdgpu_ih_ring *ih)
ih                110 drivers/gpu/drm/amd/amdgpu/si_ih.c 	wptr = le32_to_cpu(*ih->wptr_cpu);
ih                115 drivers/gpu/drm/amd/amdgpu/si_ih.c 			wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);
ih                116 drivers/gpu/drm/amd/amdgpu/si_ih.c 		ih->rptr = (wptr + 16) & ih->ptr_mask;
ih                121 drivers/gpu/drm/amd/amdgpu/si_ih.c 	return (wptr & ih->ptr_mask);
ih                125 drivers/gpu/drm/amd/amdgpu/si_ih.c 			    struct amdgpu_ih_ring *ih,
ih                128 drivers/gpu/drm/amd/amdgpu/si_ih.c 	u32 ring_index = ih->rptr >> 2;
ih                131 drivers/gpu/drm/amd/amdgpu/si_ih.c 	dw[0] = le32_to_cpu(ih->ring[ring_index + 0]);
ih                132 drivers/gpu/drm/amd/amdgpu/si_ih.c 	dw[1] = le32_to_cpu(ih->ring[ring_index + 1]);
ih                133 drivers/gpu/drm/amd/amdgpu/si_ih.c 	dw[2] = le32_to_cpu(ih->ring[ring_index + 2]);
ih                134 drivers/gpu/drm/amd/amdgpu/si_ih.c 	dw[3] = le32_to_cpu(ih->ring[ring_index + 3]);
ih                142 drivers/gpu/drm/amd/amdgpu/si_ih.c 	ih->rptr += 16;
ih                146 drivers/gpu/drm/amd/amdgpu/si_ih.c 			   struct amdgpu_ih_ring *ih)
ih                148 drivers/gpu/drm/amd/amdgpu/si_ih.c 	WREG32(IH_RB_RPTR, ih->rptr);
ih                165 drivers/gpu/drm/amd/amdgpu/si_ih.c 	r = amdgpu_ih_ring_init(adev, &adev->irq.ih, 64 * 1024, false);
ih                177 drivers/gpu/drm/amd/amdgpu/si_ih.c 	amdgpu_ih_ring_fini(adev, &adev->irq.ih);
ih               1270 drivers/gpu/drm/amd/amdgpu/soc15.c 		adev->nbio_funcs->ih_doorbell_range(adev, adev->irq.ih.use_doorbell,
ih               1271 drivers/gpu/drm/amd/amdgpu/soc15.c 						adev->irq.ih.doorbell_index);
ih                 67 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	adev->irq.ih.enabled = true;
ih                 87 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	adev->irq.ih.enabled = false;
ih                 88 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	adev->irq.ih.rptr = 0;
ih                105 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	struct amdgpu_ih_ring *ih = &adev->irq.ih;
ih                123 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	WREG32(mmIH_RB_BASE, ih->gpu_addr >> 8);
ih                125 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4);
ih                138 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	WREG32(mmIH_RB_WPTR_ADDR_LO, lower_32_bits(ih->wptr_addr));
ih                139 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	WREG32(mmIH_RB_WPTR_ADDR_HI, upper_32_bits(ih->wptr_addr) & 0xFF);
ih                146 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	if (adev->irq.ih.use_doorbell) {
ih                148 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 						 OFFSET, adev->irq.ih.doorbell_index);
ih                192 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 			     struct amdgpu_ih_ring *ih)
ih                196 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	wptr = le32_to_cpu(*ih->wptr_cpu);
ih                205 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 			 wptr, ih->rptr, (wptr + 16) & ih->ptr_mask);
ih                206 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 		ih->rptr = (wptr + 16) & ih->ptr_mask;
ih                211 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	return (wptr & ih->ptr_mask);
ih                223 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 			       struct amdgpu_ih_ring *ih,
ih                227 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	u32 ring_index = ih->rptr >> 2;
ih                230 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	dw[0] = le32_to_cpu(ih->ring[ring_index + 0]);
ih                231 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	dw[1] = le32_to_cpu(ih->ring[ring_index + 1]);
ih                232 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	dw[2] = le32_to_cpu(ih->ring[ring_index + 2]);
ih                233 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	dw[3] = le32_to_cpu(ih->ring[ring_index + 3]);
ih                243 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	ih->rptr += 16;
ih                254 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 			      struct amdgpu_ih_ring *ih)
ih                256 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	if (ih->use_doorbell) {
ih                258 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 		*ih->rptr_cpu = ih->rptr;
ih                259 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 		WDOORBELL32(ih->doorbell_index, ih->rptr);
ih                261 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 		WREG32(mmIH_RB_RPTR, ih->rptr);
ih                284 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	r = amdgpu_ih_ring_init(adev, &adev->irq.ih, 64 * 1024, true);
ih                288 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	adev->irq.ih.use_doorbell = true;
ih                289 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	adev->irq.ih.doorbell_index = adev->doorbell_index.ih;
ih                301 drivers/gpu/drm/amd/amdgpu/tonga_ih.c 	amdgpu_ih_ring_fini(adev, &adev->irq.ih);
ih                 61 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	adev->irq.ih.enabled = true;
ih                121 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	adev->irq.ih.enabled = false;
ih                122 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	adev->irq.ih.rptr = 0;
ih                166 drivers/gpu/drm/amd/amdgpu/vega10_ih.c static uint32_t vega10_ih_rb_cntl(struct amdgpu_ih_ring *ih, uint32_t ih_rb_cntl)
ih                168 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	int rb_bufsz = order_base_2(ih->ring_size / 4);
ih                171 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 				   MC_SPACE, ih->use_bus_addr ? 1 : 4);
ih                189 drivers/gpu/drm/amd/amdgpu/vega10_ih.c static uint32_t vega10_ih_doorbell_rptr(struct amdgpu_ih_ring *ih)
ih                193 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	if (ih->use_doorbell) {
ih                196 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 						 ih->doorbell_index);
ih                221 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	struct amdgpu_ih_ring *ih;
ih                231 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	ih = &adev->irq.ih;
ih                233 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	WREG32_SOC15(OSSSYS, 0, mmIH_RB_BASE, ih->gpu_addr >> 8);
ih                234 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	WREG32_SOC15(OSSSYS, 0, mmIH_RB_BASE_HI, (ih->gpu_addr >> 40) & 0xff);
ih                238 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	ih_rb_cntl = vega10_ih_rb_cntl(ih, ih_rb_cntl);
ih                239 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	if (adev->irq.ih.use_bus_addr) {
ih                263 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 		     lower_32_bits(ih->wptr_addr));
ih                265 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 		     upper_32_bits(ih->wptr_addr) & 0xFFFF);
ih                272 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 		     vega10_ih_doorbell_rptr(ih));
ih                274 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	ih = &adev->irq.ih1;
ih                275 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	if (ih->ring_size) {
ih                276 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 		WREG32_SOC15(OSSSYS, 0, mmIH_RB_BASE_RING1, ih->gpu_addr >> 8);
ih                278 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 			     (ih->gpu_addr >> 40) & 0xff);
ih                281 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 		ih_rb_cntl = vega10_ih_rb_cntl(ih, ih_rb_cntl);
ih                301 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 			     vega10_ih_doorbell_rptr(ih));
ih                304 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	ih = &adev->irq.ih2;
ih                305 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	if (ih->ring_size) {
ih                306 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 		WREG32_SOC15(OSSSYS, 0, mmIH_RB_BASE_RING2, ih->gpu_addr >> 8);
ih                308 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 			     (ih->gpu_addr >> 40) & 0xff);
ih                311 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 		ih_rb_cntl = vega10_ih_rb_cntl(ih, ih_rb_cntl);
ih                328 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 			     vega10_ih_doorbell_rptr(ih));
ih                374 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 			      struct amdgpu_ih_ring *ih)
ih                378 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	wptr = le32_to_cpu(*ih->wptr_cpu);
ih                385 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	if (ih == &adev->irq.ih)
ih                387 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	else if (ih == &adev->irq.ih1)
ih                389 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	else if (ih == &adev->irq.ih2)
ih                404 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	tmp = (wptr + 32) & ih->ptr_mask;
ih                407 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 		 wptr, ih->rptr, tmp);
ih                408 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	ih->rptr = tmp;
ih                410 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	if (ih == &adev->irq.ih)
ih                412 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	else if (ih == &adev->irq.ih1)
ih                414 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	else if (ih == &adev->irq.ih2)
ih                424 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	return (wptr & ih->ptr_mask);
ih                436 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 				struct amdgpu_ih_ring *ih,
ih                440 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	u32 ring_index = ih->rptr >> 2;
ih                443 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	dw[0] = le32_to_cpu(ih->ring[ring_index + 0]);
ih                444 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	dw[1] = le32_to_cpu(ih->ring[ring_index + 1]);
ih                445 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	dw[2] = le32_to_cpu(ih->ring[ring_index + 2]);
ih                446 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	dw[3] = le32_to_cpu(ih->ring[ring_index + 3]);
ih                447 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	dw[4] = le32_to_cpu(ih->ring[ring_index + 4]);
ih                448 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	dw[5] = le32_to_cpu(ih->ring[ring_index + 5]);
ih                449 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	dw[6] = le32_to_cpu(ih->ring[ring_index + 6]);
ih                450 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	dw[7] = le32_to_cpu(ih->ring[ring_index + 7]);
ih                467 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	ih->rptr += 32;
ih                477 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 			       struct amdgpu_ih_ring *ih)
ih                483 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	if (ih == &adev->irq.ih)
ih                485 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	else if (ih == &adev->irq.ih1)
ih                487 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	else if (ih == &adev->irq.ih2)
ih                495 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 		if ((v < ih->ring_size) && (v != ih->rptr))
ih                496 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 			WDOORBELL32(ih->doorbell_index, ih->rptr);
ih                510 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 			       struct amdgpu_ih_ring *ih)
ih                512 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	if (ih->use_doorbell) {
ih                514 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 		*ih->rptr_cpu = ih->rptr;
ih                515 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 		WDOORBELL32(ih->doorbell_index, ih->rptr);
ih                518 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 			vega10_ih_irq_rearm(adev, ih);
ih                519 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	} else if (ih == &adev->irq.ih) {
ih                520 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 		WREG32_SOC15(OSSSYS, 0, mmIH_RB_RPTR, ih->rptr);
ih                521 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	} else if (ih == &adev->irq.ih1) {
ih                522 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 		WREG32_SOC15(OSSSYS, 0, mmIH_RB_RPTR_RING1, ih->rptr);
ih                523 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	} else if (ih == &adev->irq.ih2) {
ih                524 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 		WREG32_SOC15(OSSSYS, 0, mmIH_RB_RPTR_RING2, ih->rptr);
ih                586 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	r = amdgpu_ih_ring_init(adev, &adev->irq.ih, 256 * 1024, true);
ih                590 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	adev->irq.ih.use_doorbell = true;
ih                591 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	adev->irq.ih.doorbell_index = adev->doorbell_index.ih << 1;
ih                598 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	adev->irq.ih1.doorbell_index = (adev->doorbell_index.ih + 1) << 1;
ih                605 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	adev->irq.ih2.doorbell_index = (adev->doorbell_index.ih + 2) << 1;
ih                619 drivers/gpu/drm/amd/amdgpu/vega10_ih.c 	amdgpu_ih_ring_fini(adev, &adev->irq.ih);
ih                 75 drivers/gpu/drm/amd/amdgpu/vega10_reg_init.c 	adev->doorbell_index.ih = AMDGPU_DOORBELL64_IH;
ih                 81 drivers/gpu/drm/amd/amdgpu/vega20_reg_init.c 	adev->doorbell_index.ih = AMDGPU_VEGA20_DOORBELL_IH;
ih               1817 drivers/gpu/drm/amd/amdgpu/vi.c 	adev->doorbell_index.ih = AMDGPU_DOORBELL_IH;
ih                 98 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_irq.c 				     void (*ih)(void *),
ih                102 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_irq.c 	hcd->handler = ih;
ih                144 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_irq.c 					    void *ih,
ih                173 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_irq.c 		if (ih == handler) {
ih                193 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_irq.c 		ih, int_params->irq_source, int_params->int_context);
ih                200 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_irq.c 				 void (*ih)(void *))
ih                202 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_irq.c 	if (NULL == int_params || NULL == ih) {
ih                263 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_irq.c 				       void (*ih)(void *),
ih                271 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_irq.c 	if (false == validate_irq_registration_params(int_params, ih))
ih                280 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_irq.c 	init_handler_common_data(handler_data, ih, handler_args, &adev->dm);
ih                328 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_irq.c 					void *ih)
ih                334 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_irq.c 	if (false == validate_irq_unregistration_params(irq_source, ih))
ih                345 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_irq.c 		handler_list = remove_irq_handler(adev, ih, &int_params);
ih                356 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_irq.c 			ih, irq_source);
ih                 68 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_irq.h 				       void (*ih)(void *),
ih                 46 drivers/gpu/drm/amd/display/dc/dm_services.h 	interrupt_handler ih,
ih               6836 drivers/gpu/drm/radeon/cik.c 	rdev->ih.enabled = true;
ih               6858 drivers/gpu/drm/radeon/cik.c 	rdev->ih.enabled = false;
ih               6859 drivers/gpu/drm/radeon/cik.c 	rdev->ih.rptr = 0;
ih               6983 drivers/gpu/drm/radeon/cik.c 	WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8);
ih               6984 drivers/gpu/drm/radeon/cik.c 	rb_bufsz = order_base_2(rdev->ih.ring_size / 4);
ih               7045 drivers/gpu/drm/radeon/cik.c 	if (!rdev->ih.enabled) {
ih               7512 drivers/gpu/drm/radeon/cik.c 			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
ih               7513 drivers/gpu/drm/radeon/cik.c 		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
ih               7518 drivers/gpu/drm/radeon/cik.c 	return (wptr & rdev->ih.ptr_mask);
ih               7568 drivers/gpu/drm/radeon/cik.c 	if (!rdev->ih.enabled || rdev->shutdown)
ih               7575 drivers/gpu/drm/radeon/cik.c 	if (atomic_xchg(&rdev->ih.lock, 1))
ih               7578 drivers/gpu/drm/radeon/cik.c 	rptr = rdev->ih.rptr;
ih               7591 drivers/gpu/drm/radeon/cik.c 		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
ih               7592 drivers/gpu/drm/radeon/cik.c 		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
ih               7593 drivers/gpu/drm/radeon/cik.c 		ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff;
ih               8101 drivers/gpu/drm/radeon/cik.c 		rptr &= rdev->ih.ptr_mask;
ih               8114 drivers/gpu/drm/radeon/cik.c 	rdev->ih.rptr = rptr;
ih               8115 drivers/gpu/drm/radeon/cik.c 	atomic_set(&rdev->ih.lock, 0);
ih               8672 drivers/gpu/drm/radeon/cik.c 	rdev->ih.ring_obj = NULL;
ih               4505 drivers/gpu/drm/radeon/evergreen.c 	if (!rdev->ih.enabled) {
ih               4692 drivers/gpu/drm/radeon/evergreen.c 			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
ih               4693 drivers/gpu/drm/radeon/evergreen.c 		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
ih               4698 drivers/gpu/drm/radeon/evergreen.c 	return (wptr & rdev->ih.ptr_mask);
ih               4718 drivers/gpu/drm/radeon/evergreen.c 	if (!rdev->ih.enabled || rdev->shutdown)
ih               4725 drivers/gpu/drm/radeon/evergreen.c 	if (atomic_xchg(&rdev->ih.lock, 1))
ih               4728 drivers/gpu/drm/radeon/evergreen.c 	rptr = rdev->ih.rptr;
ih               4740 drivers/gpu/drm/radeon/evergreen.c 		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
ih               4741 drivers/gpu/drm/radeon/evergreen.c 		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
ih               4910 drivers/gpu/drm/radeon/evergreen.c 		rptr &= rdev->ih.ptr_mask;
ih               4921 drivers/gpu/drm/radeon/evergreen.c 	rdev->ih.rptr = rptr;
ih               4922 drivers/gpu/drm/radeon/evergreen.c 	atomic_set(&rdev->ih.lock, 0);
ih               5263 drivers/gpu/drm/radeon/evergreen.c 	rdev->ih.ring_obj = NULL;
ih               2438 drivers/gpu/drm/radeon/ni.c 	rdev->ih.ring_obj = NULL;
ih               3318 drivers/gpu/drm/radeon/r600.c 	rdev->ih.ring_obj = NULL;
ih               3476 drivers/gpu/drm/radeon/r600.c 	rdev->ih.ring_size = ring_size;
ih               3477 drivers/gpu/drm/radeon/r600.c 	rdev->ih.ptr_mask = rdev->ih.ring_size - 1;
ih               3478 drivers/gpu/drm/radeon/r600.c 	rdev->ih.rptr = 0;
ih               3486 drivers/gpu/drm/radeon/r600.c 	if (rdev->ih.ring_obj == NULL) {
ih               3487 drivers/gpu/drm/radeon/r600.c 		r = radeon_bo_create(rdev, rdev->ih.ring_size,
ih               3490 drivers/gpu/drm/radeon/r600.c 				     NULL, NULL, &rdev->ih.ring_obj);
ih               3495 drivers/gpu/drm/radeon/r600.c 		r = radeon_bo_reserve(rdev->ih.ring_obj, false);
ih               3498 drivers/gpu/drm/radeon/r600.c 		r = radeon_bo_pin(rdev->ih.ring_obj,
ih               3500 drivers/gpu/drm/radeon/r600.c 				  &rdev->ih.gpu_addr);
ih               3502 drivers/gpu/drm/radeon/r600.c 			radeon_bo_unreserve(rdev->ih.ring_obj);
ih               3506 drivers/gpu/drm/radeon/r600.c 		r = radeon_bo_kmap(rdev->ih.ring_obj,
ih               3507 drivers/gpu/drm/radeon/r600.c 				   (void **)&rdev->ih.ring);
ih               3508 drivers/gpu/drm/radeon/r600.c 		radeon_bo_unreserve(rdev->ih.ring_obj);
ih               3520 drivers/gpu/drm/radeon/r600.c 	if (rdev->ih.ring_obj) {
ih               3521 drivers/gpu/drm/radeon/r600.c 		r = radeon_bo_reserve(rdev->ih.ring_obj, false);
ih               3523 drivers/gpu/drm/radeon/r600.c 			radeon_bo_kunmap(rdev->ih.ring_obj);
ih               3524 drivers/gpu/drm/radeon/r600.c 			radeon_bo_unpin(rdev->ih.ring_obj);
ih               3525 drivers/gpu/drm/radeon/r600.c 			radeon_bo_unreserve(rdev->ih.ring_obj);
ih               3527 drivers/gpu/drm/radeon/r600.c 		radeon_bo_unref(&rdev->ih.ring_obj);
ih               3528 drivers/gpu/drm/radeon/r600.c 		rdev->ih.ring = NULL;
ih               3529 drivers/gpu/drm/radeon/r600.c 		rdev->ih.ring_obj = NULL;
ih               3602 drivers/gpu/drm/radeon/r600.c 	rdev->ih.enabled = true;
ih               3617 drivers/gpu/drm/radeon/r600.c 	rdev->ih.enabled = false;
ih               3618 drivers/gpu/drm/radeon/r600.c 	rdev->ih.rptr = 0;
ih               3710 drivers/gpu/drm/radeon/r600.c 	WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8);
ih               3711 drivers/gpu/drm/radeon/r600.c 	rb_bufsz = order_base_2(rdev->ih.ring_size / 4);
ih               3779 drivers/gpu/drm/radeon/r600.c 	if (!rdev->ih.enabled) {
ih               4056 drivers/gpu/drm/radeon/r600.c 			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
ih               4057 drivers/gpu/drm/radeon/r600.c 		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
ih               4062 drivers/gpu/drm/radeon/r600.c 	return (wptr & rdev->ih.ptr_mask);
ih               4105 drivers/gpu/drm/radeon/r600.c 	if (!rdev->ih.enabled || rdev->shutdown)
ih               4116 drivers/gpu/drm/radeon/r600.c 	if (atomic_xchg(&rdev->ih.lock, 1))
ih               4119 drivers/gpu/drm/radeon/r600.c 	rptr = rdev->ih.rptr;
ih               4131 drivers/gpu/drm/radeon/r600.c 		src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
ih               4132 drivers/gpu/drm/radeon/r600.c 		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
ih               4324 drivers/gpu/drm/radeon/r600.c 		rptr &= rdev->ih.ptr_mask;
ih               4333 drivers/gpu/drm/radeon/r600.c 	rdev->ih.rptr = rptr;
ih               4334 drivers/gpu/drm/radeon/r600.c 	atomic_set(&rdev->ih.lock, 0);
ih               2408 drivers/gpu/drm/radeon/radeon.h 	struct r600_ih ih; /* r6/700 interrupt ring */
ih               1320 drivers/gpu/drm/radeon/radeon_device.c 	atomic_set(&rdev->ih.lock, 0);
ih               1979 drivers/gpu/drm/radeon/rv770.c 	rdev->ih.ring_obj = NULL;
ih               5929 drivers/gpu/drm/radeon/si.c 	rdev->ih.enabled = true;
ih               5944 drivers/gpu/drm/radeon/si.c 	rdev->ih.enabled = false;
ih               5945 drivers/gpu/drm/radeon/si.c 	rdev->ih.rptr = 0;
ih               6011 drivers/gpu/drm/radeon/si.c 	WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8);
ih               6012 drivers/gpu/drm/radeon/si.c 	rb_bufsz = order_base_2(rdev->ih.ring_size / 4);
ih               6064 drivers/gpu/drm/radeon/si.c 	if (!rdev->ih.enabled) {
ih               6226 drivers/gpu/drm/radeon/si.c 			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
ih               6227 drivers/gpu/drm/radeon/si.c 		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
ih               6232 drivers/gpu/drm/radeon/si.c 	return (wptr & rdev->ih.ptr_mask);
ih               6260 drivers/gpu/drm/radeon/si.c 	if (!rdev->ih.enabled || rdev->shutdown)
ih               6267 drivers/gpu/drm/radeon/si.c 	if (atomic_xchg(&rdev->ih.lock, 1))
ih               6270 drivers/gpu/drm/radeon/si.c 	rptr = rdev->ih.rptr;
ih               6282 drivers/gpu/drm/radeon/si.c 		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
ih               6283 drivers/gpu/drm/radeon/si.c 		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
ih               6284 drivers/gpu/drm/radeon/si.c 		ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff;
ih               6436 drivers/gpu/drm/radeon/si.c 		rptr &= rdev->ih.ptr_mask;
ih               6445 drivers/gpu/drm/radeon/si.c 	rdev->ih.rptr = rptr;
ih               6446 drivers/gpu/drm/radeon/si.c 	atomic_set(&rdev->ih.lock, 0);
ih               6913 drivers/gpu/drm/radeon/si.c 	rdev->ih.ring_obj = NULL;
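A pattern repeated across the get_wptr paths above, both in the amdgpu cik/cz/iceland/tonga/si code and in the radeon r600/evergreen/cik/si code: on a ring overflow the driver moves the read pointer to just past the reported write pointer, deliberately dropping the oldest unprocessed entry so that processing resumes from the newest data; 16 bytes is the entry size on these parts. A small sketch of that recovery step:

#include <stdint.h>

static uint32_t handle_overflow(uint32_t wptr, uint32_t *rptr,
				uint32_t ptr_mask, int overflow)
{
	if (overflow)
		*rptr = (wptr + 16) & ptr_mask;	/* skip the clobbered entry */
	return wptr & ptr_mask;			/* matches "return (wptr & ih->ptr_mask)" */
}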
ih               1340 drivers/iommu/dmar.c 	int ih = 0;
ih               1350 drivers/iommu/dmar.c 	desc.qw1 = QI_IOTLB_ADDR(addr) | QI_IOTLB_IH(ih)
ih               1489 drivers/iommu/intel-iommu.c 				  int ih, int map)
ih               1497 drivers/iommu/intel-iommu.c 	if (ih)
ih               1498 drivers/iommu/intel-iommu.c 		ih = 1 << 6;
ih               1509 drivers/iommu/intel-iommu.c 		iommu->flush.flush_iotlb(iommu, did, addr | ih, mask,
ih                103 drivers/iommu/intel-svm.c 				unsigned long address, unsigned long pages, int ih)
ih                121 drivers/iommu/intel-svm.c 				QI_EIOTLB_IH(ih) |
ih                155 drivers/iommu/intel-svm.c 				unsigned long pages, int ih)
ih                161 drivers/iommu/intel-svm.c 		intel_flush_svm_range_dev(svm, sdev, address, pages, ih);
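In the intel-iommu lines above, ih is an "invalidation hint" flag: when set it is turned into bit 6 and OR'd into the address handed to the IOTLB flush, and intel-svm passes it through QI_EIOTLB_IH() in the queued-invalidation descriptor. A sketch of just that encoding step, mirroring the two intel-iommu.c lines; nothing beyond those lines is assumed about the descriptor layout.

#include <stdint.h>

static uint64_t encode_iotlb_flush_addr(uint64_t addr, int ih)
{
	uint64_t hint = ih ? (1u << 6) : 0;	/* "if (ih) ih = 1 << 6;" */

	return addr | hint;			/* "addr | ih" */
}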
ih                190 drivers/net/ethernet/cavium/liquidio/octeon_iq.h 	u64 ih;
ih               1516 drivers/net/ethernet/packetengines/hamachi.c 				struct iphdr *ih = (struct iphdr *) skb->data;
ih               1521 drivers/net/ethernet/packetengines/hamachi.c 				if (ntohs(ih->tot_len) >= 46){
ih               1523 drivers/net/ethernet/packetengines/hamachi.c 					if (!(ih->frag_off & cpu_to_be16(IP_MF|IP_OFFSET))) {
ih                477 drivers/net/ethernet/sgi/ioc3-eth.c 	struct iphdr *ih;
ih                497 drivers/net/ethernet/sgi/ioc3-eth.c 	ih = (struct iphdr *)((char *)eh + ETH_HLEN);
ih                498 drivers/net/ethernet/sgi/ioc3-eth.c 	if (ip_is_fragment(ih))
ih                501 drivers/net/ethernet/sgi/ioc3-eth.c 	proto = ih->protocol;
ih                507 drivers/net/ethernet/sgi/ioc3-eth.c 	       (ih->tot_len - (ih->ihl << 2)) +
ih                508 drivers/net/ethernet/sgi/ioc3-eth.c 	       htons((u16)ih->protocol) +
ih                509 drivers/net/ethernet/sgi/ioc3-eth.c 	       (ih->saddr >> 16) + (ih->saddr & 0xffff) +
ih                510 drivers/net/ethernet/sgi/ioc3-eth.c 	       (ih->daddr >> 16) + (ih->daddr & 0xffff);
ih               1384 drivers/net/ethernet/sgi/ioc3-eth.c 		const struct iphdr *ih = ip_hdr(skb);
ih               1385 drivers/net/ethernet/sgi/ioc3-eth.c 		const int proto = ntohs(ih->protocol);
ih               1401 drivers/net/ethernet/sgi/ioc3-eth.c 		csum = csum_tcpudp_nofold(ih->saddr, ih->daddr,
ih               1402 drivers/net/ethernet/sgi/ioc3-eth.c 					  ih->tot_len - (ih->ihl << 2),
ih               1408 drivers/net/ethernet/sgi/ioc3-eth.c 		csoff = ETH_HLEN + (ih->ihl << 2);
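The ioc3-eth lines above precompute an IPv4 pseudo-header sum for hardware TCP/UDP checksumming: both address words are split into 16-bit halves and summed together with the transport length (tot_len minus ihl << 2) and the protocol value; one path does it by hand, the other uses csum_tcpudp_nofold(). A generic sketch of that kind of one's-complement accumulation, assuming host-byte-order inputs purely to keep the example short (the driver works on big-endian wire values):

#include <stdint.h>

static uint16_t pseudo_hdr_sum(uint32_t saddr, uint32_t daddr,
			       uint16_t transport_len, uint8_t proto)
{
	uint32_t sum = 0;

	sum += (saddr >> 16) + (saddr & 0xffff);
	sum += (daddr >> 16) + (daddr & 0xffff);
	sum += transport_len;
	sum += proto;

	while (sum >> 16)		/* fold carries into the low 16 bits */
		sum = (sum & 0xffff) + (sum >> 16);

	return (uint16_t)sum;
}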
ih                 53 fs/coda/upcall.c         inp->ih.opcode = opcode;
ih                 54 fs/coda/upcall.c 	inp->ih.pid = task_pid_nr_ns(current, &init_pid_ns);
ih                 55 fs/coda/upcall.c 	inp->ih.pgid = task_pgrp_nr_ns(current, &init_pid_ns);
ih                 56 fs/coda/upcall.c 	inp->ih.uid = from_kuid(&init_user_ns, current_fsuid());
ih                170 fs/coda/upcall.c 	inp->ih.uid = from_kuid(&init_user_ns, uid);
ih                733 fs/coda/upcall.c 	buffer->ih.unique = ++vcp->vc_seq;
ih                739 fs/coda/upcall.c 	req->uc_opcode = buffer->ih.opcode;
ih                740 fs/coda/upcall.c 	req->uc_unique = buffer->ih.unique;
ih                800 fs/coda/upcall.c 	sig_inputArgs->ih.opcode = CODA_SIGNAL;
ih                801 fs/coda/upcall.c 	sig_inputArgs->ih.unique = req->uc_unique;
ih                804 fs/coda/upcall.c 	sig_req->uc_opcode = sig_inputArgs->ih.opcode;
ih                805 fs/coda/upcall.c 	sig_req->uc_unique = sig_inputArgs->ih.unique;
ih                 44 fs/erofs/xattr.c 	struct erofs_xattr_ibody_header *ih;
ih                102 fs/erofs/xattr.c 	ih = (struct erofs_xattr_ibody_header *)(it.kaddr + it.ofs);
ih                104 fs/erofs/xattr.c 	vi->xattr_shared_count = ih->h_shared_count;
ih               1026 fs/fuse/dev.c  	struct fuse_in_header ih;
ih               1028 fs/fuse/dev.c  	unsigned reqsize = sizeof(ih) + sizeof(arg);
ih               1032 fs/fuse/dev.c  	memset(&ih, 0, sizeof(ih));
ih               1034 fs/fuse/dev.c  	ih.len = reqsize;
ih               1035 fs/fuse/dev.c  	ih.opcode = FUSE_INTERRUPT;
ih               1036 fs/fuse/dev.c  	ih.unique = (req->in.h.unique | FUSE_INT_REQ_BIT);
ih               1043 fs/fuse/dev.c  	err = fuse_copy_one(cs, &ih, sizeof(ih));
ih               1084 fs/fuse/dev.c  	struct fuse_in_header ih = {
ih               1088 fs/fuse/dev.c  		.len = sizeof(ih) + sizeof(arg),
ih               1093 fs/fuse/dev.c  	if (nbytes < ih.len)
ih               1096 fs/fuse/dev.c  	err = fuse_copy_one(cs, &ih, sizeof(ih));
ih               1104 fs/fuse/dev.c  	return ih.len;
ih               1116 fs/fuse/dev.c  	struct fuse_in_header ih = {
ih               1119 fs/fuse/dev.c  		.len = sizeof(ih) + sizeof(arg),
ih               1122 fs/fuse/dev.c  	if (nbytes < ih.len) {
ih               1127 fs/fuse/dev.c  	max_forgets = (nbytes - ih.len) / sizeof(struct fuse_forget_one);
ih               1132 fs/fuse/dev.c  	ih.len += count * sizeof(struct fuse_forget_one);
ih               1133 fs/fuse/dev.c  	err = fuse_copy_one(cs, &ih, sizeof(ih));
ih               1153 fs/fuse/dev.c  	return ih.len;
ih                 52 fs/fuse/virtio_fs.c 	struct fuse_in_header ih;
ih                732 fs/fuse/virtio_fs.c 	forget->ih = (struct fuse_in_header){
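In the fuse lines above, ih is a fuse_in_header the kernel builds itself for interrupt, single-forget, and batch-forget messages; the batch case sizes the message from the reader's buffer ("max_forgets = (nbytes - ih.len) / sizeof(struct fuse_forget_one)") and then grows ih.len by one entry per forget actually copied. A sketch of that sizing arithmetic with placeholder struct layouts, not the real fuse ABI types:

#include <stddef.h>
#include <stdint.h>

struct hdr        { uint32_t len; uint32_t opcode; uint64_t unique; };
struct batch_arg  { uint32_t count; uint32_t dummy; };
struct forget_one { uint64_t nodeid; uint64_t nlookup; };

/* Returns how many forget entries fit; writes the resulting header length. */
static size_t size_batch(size_t nbytes, size_t available, uint32_t *out_len)
{
	size_t len = sizeof(struct hdr) + sizeof(struct batch_arg);
	size_t max_forgets, count;

	if (nbytes < len)
		return 0;			/* reader's buffer too small */

	max_forgets = (nbytes - len) / sizeof(struct forget_one);
	count = available < max_forgets ? available : max_forgets;

	*out_len = (uint32_t)(len + count * sizeof(struct forget_one));
	return count;
}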
ih               3585 fs/jfs/jfs_dtree.c 	struct idtentry *ih;
ih               3607 fs/jfs/jfs_dtree.c 	ih = (struct idtentry *) & p->slot[si];
ih               3608 fs/jfs/jfs_dtree.c 	si = ih->next;
ih               3609 fs/jfs/jfs_dtree.c 	name = ih->name;
ih               3610 fs/jfs/jfs_dtree.c 	namlen = ih->namlen;
ih               3663 fs/jfs/jfs_dtree.c 	struct idtentry *ih;
ih               3703 fs/jfs/jfs_dtree.c 		ih = (struct idtentry *) & p->slot[si];
ih               3704 fs/jfs/jfs_dtree.c 		si = ih->next;
ih               3705 fs/jfs/jfs_dtree.c 		name = ih->name;
ih               3706 fs/jfs/jfs_dtree.c 		namlen = ih->namlen;
ih               3835 fs/jfs/jfs_dtree.c 	struct idtentry *ih;
ih               3854 fs/jfs/jfs_dtree.c 		ih = (struct idtentry *) & p->slot[si];
ih               3855 fs/jfs/jfs_dtree.c 		si = ih->next;
ih               3856 fs/jfs/jfs_dtree.c 		namlen = ih->namlen;
ih               3857 fs/jfs/jfs_dtree.c 		name = ih->name;
ih               3898 fs/jfs/jfs_dtree.c 	struct idtentry *ih = NULL;
ih               3943 fs/jfs/jfs_dtree.c 		ih = (struct idtentry *) h;
ih               3944 fs/jfs/jfs_dtree.c 		ih->next = h->next;
ih               3945 fs/jfs/jfs_dtree.c 		xd = (pxd_t *) ih;
ih               3947 fs/jfs/jfs_dtree.c 		ih->namlen = klen;
ih               3948 fs/jfs/jfs_dtree.c 		name = ih->name;
ih               4006 fs/jfs/jfs_dtree.c 			ih->next = -1;
ih                858 fs/reiserfs/bitmap.c 	struct item_head *ih;
ih                873 fs/reiserfs/bitmap.c 	ih = tp_item_head(path);
ih                883 fs/reiserfs/bitmap.c 	if (!hint->formatted_node && is_indirect_le_ih(ih)) {
ih                884 fs/reiserfs/bitmap.c 		if (pos_in_item == I_UNFM_NUM(ih))
ih                 70 fs/reiserfs/dir.c 	struct item_head *ih, tmp_ih;
ih                111 fs/reiserfs/dir.c 		ih = de.de_ih;
ih                112 fs/reiserfs/dir.c 		store_ih(&tmp_ih, ih);
ih                115 fs/reiserfs/dir.c 		RFALSE(COMP_SHORT_KEYS(&ih->ih_key, &pos_key),
ih                117 fs/reiserfs/dir.c 		       ih, &pos_key);
ih                126 fs/reiserfs/dir.c 		RFALSE(ih_entry_count(ih) < entry_num,
ih                128 fs/reiserfs/dir.c 		       entry_num, ih_entry_count(ih));
ih                135 fs/reiserfs/dir.c 		    || entry_num < ih_entry_count(ih)) {
ih                137 fs/reiserfs/dir.c 			    B_I_DEH(bh, ih) + entry_num;
ih                139 fs/reiserfs/dir.c 			for (; entry_num < ih_entry_count(ih);
ih                149 fs/reiserfs/dir.c 				d_reclen = entry_length(bh, ih, entry_num);
ih                150 fs/reiserfs/dir.c 				d_name = B_I_DEH_ENTRY_FILE_NAME(bh, ih, deh);
ih                 83 fs/reiserfs/do_balan.c 	struct item_head *ih = item_head(tbS0, item_pos);
ih                 86 fs/reiserfs/do_balan.c 	RFALSE(ih_item_len(ih) + IH_SIZE != -tb->insert_size[0],
ih                 88 fs/reiserfs/do_balan.c 	       -tb->insert_size[0], ih);
ih                113 fs/reiserfs/do_balan.c 	struct item_head *ih = item_head(tbS0, item_pos);
ih                118 fs/reiserfs/do_balan.c 	if (is_direntry_le_ih(ih)) {
ih                140 fs/reiserfs/do_balan.c 		RFALSE(!ih_item_len(ih),
ih                287 fs/reiserfs/do_balan.c 					     struct item_head *const ih,
ih                302 fs/reiserfs/do_balan.c 		new_item_len = ih_item_len(ih) - tb->lbytes;
ih                305 fs/reiserfs/do_balan.c 		put_ih_item_len(ih, ih_item_len(ih) - new_item_len);
ih                307 fs/reiserfs/do_balan.c 		RFALSE(ih_item_len(ih) <= 0,
ih                309 fs/reiserfs/do_balan.c 		       "ih_item_len=%d", ih_item_len(ih));
ih                313 fs/reiserfs/do_balan.c 		leaf_insert_into_buf(&bi, n + tb->item_pos - ret, ih, body,
ih                314 fs/reiserfs/do_balan.c 			     min_t(int, tb->zeroes_num, ih_item_len(ih)));
ih                321 fs/reiserfs/do_balan.c 		if (is_indirect_le_ih(ih))
ih                324 fs/reiserfs/do_balan.c 		add_le_ih_k_offset(ih, tb->lbytes << shift);
ih                326 fs/reiserfs/do_balan.c 		put_ih_item_len(ih, new_item_len);
ih                333 fs/reiserfs/do_balan.c 		RFALSE(ih_item_len(ih) <= 0,
ih                335 fs/reiserfs/do_balan.c 		       "ih_item_len=%d", ih_item_len(ih));
ih                343 fs/reiserfs/do_balan.c 		leaf_insert_into_buf(&bi, n + tb->item_pos - ret, ih, body,
ih                352 fs/reiserfs/do_balan.c 						 struct item_head * const ih,
ih                414 fs/reiserfs/do_balan.c 						  struct item_head * const ih,
ih                423 fs/reiserfs/do_balan.c 		balance_leaf_paste_left_shift_dirent(tb, ih, body);
ih                533 fs/reiserfs/do_balan.c 					  struct item_head * const ih,
ih                587 fs/reiserfs/do_balan.c 					    struct item_head * const ih,
ih                592 fs/reiserfs/do_balan.c 		return balance_leaf_paste_left_shift(tb, ih, body);
ih                594 fs/reiserfs/do_balan.c 		balance_leaf_paste_left_whole(tb, ih, body);
ih                600 fs/reiserfs/do_balan.c 				      struct item_head * const ih,
ih                611 fs/reiserfs/do_balan.c 			return balance_leaf_insert_left(tb, ih, body);
ih                613 fs/reiserfs/do_balan.c 			return balance_leaf_paste_left(tb, ih, body);
ih                622 fs/reiserfs/do_balan.c 				      struct item_head * const ih,
ih                648 fs/reiserfs/do_balan.c 		old_key_comp = le_ih_k_offset(ih);
ih                649 fs/reiserfs/do_balan.c 		old_len = ih_item_len(ih);
ih                656 fs/reiserfs/do_balan.c 		if (is_indirect_le_ih(ih))
ih                658 fs/reiserfs/do_balan.c 		offset = le_ih_k_offset(ih) + ((old_len - tb->rbytes) << shift);
ih                659 fs/reiserfs/do_balan.c 		set_le_ih_k_offset(ih, offset);
ih                660 fs/reiserfs/do_balan.c 		put_ih_item_len(ih, tb->rbytes);
ih                674 fs/reiserfs/do_balan.c 		leaf_insert_into_buf(&bi, 0, ih, r_body, r_zeroes_number);
ih                683 fs/reiserfs/do_balan.c 		set_le_ih_k_offset(ih, old_key_comp);
ih                684 fs/reiserfs/do_balan.c 		put_ih_item_len(ih, old_len - tb->rbytes);
ih                697 fs/reiserfs/do_balan.c 				     ih, body, tb->zeroes_num);
ih                708 fs/reiserfs/do_balan.c 				     struct item_head * const ih,
ih                759 fs/reiserfs/do_balan.c 				     struct item_head * const ih,
ih                770 fs/reiserfs/do_balan.c 		balance_leaf_paste_right_shift_dirent(tb, ih, body);
ih                837 fs/reiserfs/do_balan.c 				     struct item_head * const ih,
ih                881 fs/reiserfs/do_balan.c 				     struct item_head * const ih,
ih                897 fs/reiserfs/do_balan.c 		balance_leaf_paste_right_shift(tb, ih, body);
ih                900 fs/reiserfs/do_balan.c 		balance_leaf_paste_right_whole(tb, ih, body);
ih                905 fs/reiserfs/do_balan.c 			       struct item_head * const ih,
ih                914 fs/reiserfs/do_balan.c 		balance_leaf_insert_right(tb, ih, body);
ih                916 fs/reiserfs/do_balan.c 		balance_leaf_paste_right(tb, ih, body);
ih                920 fs/reiserfs/do_balan.c 					  struct item_head * const ih,
ih                950 fs/reiserfs/do_balan.c 		old_key_comp = le_ih_k_offset(ih);
ih                951 fs/reiserfs/do_balan.c 		old_len = ih_item_len(ih);
ih                958 fs/reiserfs/do_balan.c 		if (is_indirect_le_ih(ih))
ih                960 fs/reiserfs/do_balan.c 		set_le_ih_k_offset(ih,
ih                961 fs/reiserfs/do_balan.c 				   le_ih_k_offset(ih) +
ih                964 fs/reiserfs/do_balan.c 		put_ih_item_len(ih, tb->sbytes[i]);
ih                980 fs/reiserfs/do_balan.c 		leaf_insert_into_buf(&bi, 0, ih, r_body, r_zeroes_number);
ih                986 fs/reiserfs/do_balan.c 		set_le_ih_k_offset(ih, old_key_comp);
ih                987 fs/reiserfs/do_balan.c 		put_ih_item_len(ih, old_len - tb->sbytes[i]);
ih               1002 fs/reiserfs/do_balan.c 				     ih, body, tb->zeroes_num);
ih               1010 fs/reiserfs/do_balan.c 					 struct item_head * const ih,
ih               1065 fs/reiserfs/do_balan.c 					 struct item_head * const ih,
ih               1078 fs/reiserfs/do_balan.c 	RFALSE(ih, "PAP-12210: ih must be 0");
ih               1081 fs/reiserfs/do_balan.c 		balance_leaf_new_nodes_paste_dirent(tb, ih, body, insert_key,
ih               1138 fs/reiserfs/do_balan.c 					       struct item_head * const ih,
ih               1191 fs/reiserfs/do_balan.c 					 struct item_head * const ih,
ih               1211 fs/reiserfs/do_balan.c 		balance_leaf_new_nodes_paste_shift(tb, ih, body, insert_key,
ih               1215 fs/reiserfs/do_balan.c 		balance_leaf_new_nodes_paste_whole(tb, ih, body, insert_key,
ih               1221 fs/reiserfs/do_balan.c 				   struct item_head * const ih,
ih               1243 fs/reiserfs/do_balan.c 			balance_leaf_new_nodes_insert(tb, ih, body, insert_key,
ih               1246 fs/reiserfs/do_balan.c 			balance_leaf_new_nodes_paste(tb, ih, body, insert_key,
ih               1261 fs/reiserfs/do_balan.c 					    struct item_head * const ih,
ih               1267 fs/reiserfs/do_balan.c 	leaf_insert_into_buf(&bi, tb->item_pos, ih, body, tb->zeroes_num);
ih               1278 fs/reiserfs/do_balan.c 						  struct item_head * const ih,
ih               1312 fs/reiserfs/do_balan.c 					   struct item_head * const ih,
ih               1321 fs/reiserfs/do_balan.c 		balance_leaf_finish_node_paste_dirent(tb, ih, body);
ih               1356 fs/reiserfs/do_balan.c 				      struct item_head * const ih,
ih               1362 fs/reiserfs/do_balan.c 			balance_leaf_finish_node_insert(tb, ih, body);
ih               1364 fs/reiserfs/do_balan.c 			balance_leaf_finish_node_paste(tb, ih, body);
ih               1382 fs/reiserfs/do_balan.c static int balance_leaf(struct tree_balance *tb, struct item_head *ih,
ih               1399 fs/reiserfs/do_balan.c 		tb->zeroes_num = ih_item_len(ih);
ih               1409 fs/reiserfs/do_balan.c 	body += balance_leaf_left(tb, ih, body, flag);
ih               1415 fs/reiserfs/do_balan.c 	balance_leaf_right(tb, ih, body, flag);
ih               1450 fs/reiserfs/do_balan.c 	balance_leaf_new_nodes(tb, ih, body, insert_key, insert_ptr, flag);
ih               1452 fs/reiserfs/do_balan.c 	balance_leaf_finish_node(tb, ih, body, flag);
ih               1847 fs/reiserfs/do_balan.c void do_balance(struct tree_balance *tb, struct item_head *ih,
ih               1888 fs/reiserfs/do_balan.c 	    balance_leaf(tb, ih, body, flag, insert_key, insert_ptr);
ih                 53 fs/reiserfs/fix_node.c 	struct item_head *ih;
ih                 81 fs/reiserfs/fix_node.c 	ih = item_head(Sh, 0);
ih                 84 fs/reiserfs/fix_node.c 	if (op_is_left_mergeable(&ih->ih_key, Sh->b_size)
ih                105 fs/reiserfs/fix_node.c 		vi->vi_item_len += ih_item_len(ih + j) + IH_SIZE;
ih                106 fs/reiserfs/fix_node.c 		vi->vi_ih = ih + j;
ih                107 fs/reiserfs/fix_node.c 		vi->vi_item = ih_item_body(Sh, ih + j);
ih                714 fs/reiserfs/fix_node.c 		struct item_head *ih;
ih                720 fs/reiserfs/fix_node.c 		ih = item_head(S0, 0);
ih                722 fs/reiserfs/fix_node.c 		    && !comp_short_le_keys(&ih->ih_key,
ih                735 fs/reiserfs/fix_node.c 			if (is_direntry_le_ih(ih)) {
ih                742 fs/reiserfs/fix_node.c 				RFALSE(le_ih_k_offset(ih) == DOT_OFFSET,
ih               1278 fs/reiserfs/fix_node.c 	struct item_head *ih;
ih               1281 fs/reiserfs/fix_node.c 	ih = item_head(Sh, 0);
ih               1289 fs/reiserfs/fix_node.c 	      && op_is_left_mergeable(&ih->ih_key, Sh->b_size)) ? IH_SIZE : 0)
ih                134 fs/reiserfs/ibalance.c 	struct reiserfs_key *ih;
ih                164 fs/reiserfs/ibalance.c 	ih = internal_key(cur, ((to == -1) ? 0 : to));
ih                166 fs/reiserfs/ibalance.c 	memmove(ih + count, ih,
ih                170 fs/reiserfs/ibalance.c 	memcpy(ih, inserted, KEY_SIZE);
ih                172 fs/reiserfs/ibalance.c 		memcpy(ih + 1, inserted + 1, KEY_SIZE);
ih                137 fs/reiserfs/inode.c inline void make_le_item_head(struct item_head *ih, const struct cpu_key *key,
ih                143 fs/reiserfs/inode.c 		ih->ih_key.k_dir_id = cpu_to_le32(key->on_disk_key.k_dir_id);
ih                144 fs/reiserfs/inode.c 		ih->ih_key.k_objectid =
ih                147 fs/reiserfs/inode.c 	put_ih_version(ih, version);
ih                148 fs/reiserfs/inode.c 	set_le_ih_k_offset(ih, offset);
ih                149 fs/reiserfs/inode.c 	set_le_ih_k_type(ih, type);
ih                150 fs/reiserfs/inode.c 	put_ih_item_len(ih, length);
ih                156 fs/reiserfs/inode.c 	put_ih_entry_count(ih, entry_count);
ih                215 fs/reiserfs/inode.c 				    struct item_head *ih,
ih                220 fs/reiserfs/inode.c 	if (retval == POSITION_FOUND && is_indirect_le_ih(ih) &&
ih                226 fs/reiserfs/inode.c static inline int indirect_item_found(int retval, struct item_head *ih)
ih                228 fs/reiserfs/inode.c 	return (retval == POSITION_FOUND) && is_indirect_le_ih(ih);
ih                291 fs/reiserfs/inode.c 	struct item_head *ih, tmp_ih;
ih                325 fs/reiserfs/inode.c 	ih = tp_item_head(&path);
ih                326 fs/reiserfs/inode.c 	if (is_indirect_le_ih(ih)) {
ih                327 fs/reiserfs/inode.c 		__le32 *ind_item = (__le32 *) ih_item_body(bh, ih);
ih                339 fs/reiserfs/inode.c 			    ((ih_item_len(ih) / UNFM_P_SIZE) - 1)) {
ih                391 fs/reiserfs/inode.c 	copy_item_head(&tmp_ih, ih);
ih                405 fs/reiserfs/inode.c 		if (!is_direct_le_ih(ih)) {
ih                414 fs/reiserfs/inode.c 		if ((le_ih_k_offset(ih) + path.pos_in_item) > inode->i_size)
ih                416 fs/reiserfs/inode.c 		if ((le_ih_k_offset(ih) - 1 + ih_item_len(ih)) > inode->i_size) {
ih                418 fs/reiserfs/inode.c 			    inode->i_size - (le_ih_k_offset(ih) - 1) -
ih                422 fs/reiserfs/inode.c 			chars = ih_item_len(ih) - path.pos_in_item;
ih                424 fs/reiserfs/inode.c 		memcpy(p, ih_item_body(bh, ih) + path.pos_in_item, chars);
ih                447 fs/reiserfs/inode.c 		ih = tp_item_head(&path);
ih                667 fs/reiserfs/inode.c 	struct item_head *ih, tmp_ih;
ih                748 fs/reiserfs/inode.c 	ih = tp_item_head(&path);
ih                753 fs/reiserfs/inode.c 	copy_item_head(&tmp_ih, ih);
ih                756 fs/reiserfs/inode.c 	    (retval, allocated_block_nr, ih, item, pos_in_item)) {
ih                797 fs/reiserfs/inode.c 	if (indirect_item_found(retval, ih)) {
ih                851 fs/reiserfs/inode.c 		if (is_statdata_le_ih(ih)) {
ih                887 fs/reiserfs/inode.c 		} else if (is_direct_le_ih(ih)) {
ih                892 fs/reiserfs/inode.c 			    ((le_ih_k_offset(ih) -
ih               1022 fs/reiserfs/inode.c 			RFALSE(pos_in_item != ih_item_len(ih) / UNFM_P_SIZE,
ih               1031 fs/reiserfs/inode.c 						     &ih->ih_key) +
ih               1032 fs/reiserfs/inode.c 				     op_bytes_number(ih,
ih               1141 fs/reiserfs/inode.c 		ih = tp_item_head(&path);
ih               1242 fs/reiserfs/inode.c 	struct item_head *ih;
ih               1246 fs/reiserfs/inode.c 	ih = tp_item_head(path);
ih               1248 fs/reiserfs/inode.c 	copy_key(INODE_PKEY(inode), &ih->ih_key);
ih               1258 fs/reiserfs/inode.c 	if (stat_data_v1(ih)) {
ih               1260 fs/reiserfs/inode.c 		    (struct stat_data_v1 *)ih_item_body(bh, ih);
ih               1318 fs/reiserfs/inode.c 		struct stat_data *sd = (struct stat_data *)ih_item_body(bh, ih);
ih               1428 fs/reiserfs/inode.c 	struct item_head *ih;
ih               1431 fs/reiserfs/inode.c 	ih = tp_item_head(path);
ih               1433 fs/reiserfs/inode.c 	if (!is_statdata_le_ih(ih))
ih               1435 fs/reiserfs/inode.c 			       INODE_PKEY(inode), ih);
ih               1438 fs/reiserfs/inode.c 	if (stat_data_v1(ih)) {
ih               1439 fs/reiserfs/inode.c 		inode2sd_v1(ih_item_body(bh, ih), inode, size);
ih               1441 fs/reiserfs/inode.c 		inode2sd(ih_item_body(bh, ih), inode, size);
ih               1454 fs/reiserfs/inode.c 	struct item_head *ih, tmp_ih;
ih               1494 fs/reiserfs/inode.c 		ih = tp_item_head(&path);
ih               1495 fs/reiserfs/inode.c 		copy_item_head(&tmp_ih, ih);
ih               1806 fs/reiserfs/inode.c 				  struct item_head *ih, struct treepath *path,
ih               1817 fs/reiserfs/inode.c 	_make_cpu_key(&key, KEY_FORMAT_3_5, le32_to_cpu(ih->ih_key.k_dir_id),
ih               1818 fs/reiserfs/inode.c 		      le32_to_cpu(ih->ih_key.k_objectid), DOT_OFFSET,
ih               1827 fs/reiserfs/inode.c 		make_le_item_head(ih, NULL, KEY_FORMAT_3_5, DOT_OFFSET,
ih               1830 fs/reiserfs/inode.c 		make_empty_dir_item_v1(body, ih->ih_key.k_dir_id,
ih               1831 fs/reiserfs/inode.c 				       ih->ih_key.k_objectid,
ih               1835 fs/reiserfs/inode.c 		make_le_item_head(ih, NULL, KEY_FORMAT_3_5, DOT_OFFSET,
ih               1838 fs/reiserfs/inode.c 		make_empty_dir_item(body, ih->ih_key.k_dir_id,
ih               1839 fs/reiserfs/inode.c 				    ih->ih_key.k_objectid,
ih               1855 fs/reiserfs/inode.c 				 &(ih->ih_key));
ih               1860 fs/reiserfs/inode.c 	return reiserfs_insert_item(th, path, &key, ih, inode, body);
ih               1869 fs/reiserfs/inode.c 				struct item_head *ih,
ih               1880 fs/reiserfs/inode.c 		      le32_to_cpu(ih->ih_key.k_dir_id),
ih               1881 fs/reiserfs/inode.c 		      le32_to_cpu(ih->ih_key.k_objectid),
ih               1884 fs/reiserfs/inode.c 	make_le_item_head(ih, NULL, KEY_FORMAT_3_5, 1, TYPE_DIRECT, item_len,
ih               1898 fs/reiserfs/inode.c 				 &(ih->ih_key));
ih               1903 fs/reiserfs/inode.c 	return reiserfs_insert_item(th, path, &key, ih, inode, symname);
ih               1938 fs/reiserfs/inode.c 	struct item_head ih;
ih               1957 fs/reiserfs/inode.c 	ih.ih_key.k_dir_id = reiserfs_choose_packing(dir);
ih               1958 fs/reiserfs/inode.c 	ih.ih_key.k_objectid = cpu_to_le32(reiserfs_get_unused_objectid(th));
ih               1959 fs/reiserfs/inode.c 	if (!ih.ih_key.k_objectid) {
ih               1963 fs/reiserfs/inode.c 	args.objectid = inode->i_ino = le32_to_cpu(ih.ih_key.k_objectid);
ih               1965 fs/reiserfs/inode.c 		make_le_item_head(&ih, NULL, KEY_FORMAT_3_5, SD_OFFSET,
ih               1968 fs/reiserfs/inode.c 		make_le_item_head(&ih, NULL, KEY_FORMAT_3_6, SD_OFFSET,
ih               1970 fs/reiserfs/inode.c 	memcpy(INODE_PKEY(inode), &ih.ih_key, KEY_SIZE);
ih               1971 fs/reiserfs/inode.c 	args.dirid = le32_to_cpu(ih.ih_key.k_dir_id);
ih               2022 fs/reiserfs/inode.c 	_make_cpu_key(&key, KEY_FORMAT_3_6, le32_to_cpu(ih.ih_key.k_dir_id),
ih               2023 fs/reiserfs/inode.c 		      le32_to_cpu(ih.ih_key.k_objectid), SD_OFFSET,
ih               2068 fs/reiserfs/inode.c 	    reiserfs_insert_item(th, &path_to_key, &key, &ih, inode,
ih               2082 fs/reiserfs/inode.c 		    reiserfs_new_directory(th, inode, &ih, &path_to_key, dir);
ih               2090 fs/reiserfs/inode.c 		    reiserfs_new_symlink(th, inode, &ih, &path_to_key, symname,
ih               2375 fs/reiserfs/inode.c 	struct item_head *ih;
ih               2412 fs/reiserfs/inode.c 	ih = tp_item_head(&path);
ih               2417 fs/reiserfs/inode.c 	if (indirect_item_found(retval, ih)) {
ih               2429 fs/reiserfs/inode.c 	} else if (is_direct_le_ih(ih)) {
ih               2433 fs/reiserfs/inode.c 		copy_size = ih_item_len(ih) - pos_in_item;
ih               2436 fs/reiserfs/inode.c 		copy_item_head(&tmp_ih, ih);
ih               2461 fs/reiserfs/inode.c 		memcpy(ih_item_body(bh, ih) + pos_in_item, p + bytes_copied,
ih                 19 fs/reiserfs/item_ops.c static int sd_bytes_number(struct item_head *ih, int block_size)
ih                 36 fs/reiserfs/item_ops.c static void sd_print_item(struct item_head *ih, char *item)
ih                 39 fs/reiserfs/item_ops.c 	if (stat_data_v1(ih)) {
ih                 55 fs/reiserfs/item_ops.c static void sd_check_item(struct item_head *ih, char *item)
ih                114 fs/reiserfs/item_ops.c static int direct_bytes_number(struct item_head *ih, int block_size)
ih                116 fs/reiserfs/item_ops.c 	return ih_item_len(ih);
ih                134 fs/reiserfs/item_ops.c static void direct_print_item(struct item_head *ih, char *item)
ih                140 fs/reiserfs/item_ops.c 	while (j < ih_item_len(ih))
ih                145 fs/reiserfs/item_ops.c static void direct_check_item(struct item_head *ih, char *item)
ih                205 fs/reiserfs/item_ops.c static int indirect_bytes_number(struct item_head *ih, int block_size)
ih                207 fs/reiserfs/item_ops.c 	return ih_item_len(ih) / UNFM_P_SIZE * block_size;
ih                260 fs/reiserfs/item_ops.c static void indirect_print_item(struct item_head *ih, char *item)
ih                269 fs/reiserfs/item_ops.c 	if (ih_item_len(ih) % UNFM_P_SIZE)
ih                272 fs/reiserfs/item_ops.c 	printk("%d pointers\n[ ", (int)I_UNFM_NUM(ih));
ih                273 fs/reiserfs/item_ops.c 	for (j = 0; j < I_UNFM_NUM(ih); j++) {
ih                283 fs/reiserfs/item_ops.c static void indirect_check_item(struct item_head *ih, char *item)
ih                349 fs/reiserfs/item_ops.c static int direntry_bytes_number(struct item_head *ih, int block_size)
ih                372 fs/reiserfs/item_ops.c static void direntry_print_item(struct item_head *ih, char *item)
ih                385 fs/reiserfs/item_ops.c 	for (i = 0; i < ih_entry_count(ih); i++, deh++) {
ih                387 fs/reiserfs/item_ops.c 		    (i ? (deh_location(deh - 1)) : ih_item_len(ih)) -
ih                412 fs/reiserfs/item_ops.c static void direntry_check_item(struct item_head *ih, char *item)
ih                419 fs/reiserfs/item_ops.c 	for (i = 0; i < ih_entry_count(ih); i++, deh++) {
ih                636 fs/reiserfs/item_ops.c static int errcatch_bytes_number(struct item_head *ih, int block_size)
ih                657 fs/reiserfs/item_ops.c static void errcatch_print_item(struct item_head *ih, char *item)
ih                663 fs/reiserfs/item_ops.c static void errcatch_check_item(struct item_head *ih, char *item)
ih                 26 fs/reiserfs/lbalance.c 	struct item_head *ih;
ih                 31 fs/reiserfs/lbalance.c 	ih = item_head(source, item_num);
ih                 33 fs/reiserfs/lbalance.c 	RFALSE(!is_direntry_le_ih(ih), "vs-10000: item must be directory item");
ih                 39 fs/reiserfs/lbalance.c 	deh = B_I_DEH(source, ih);
ih                 42 fs/reiserfs/lbalance.c 				    ih_item_len(ih)) -
ih                 45 fs/reiserfs/lbalance.c 		    source->b_data + ih_location(ih) +
ih                 63 fs/reiserfs/lbalance.c 	    (last_first == FIRST_TO_LAST && le_ih_k_offset(ih) == DOT_OFFSET) ||
ih                 65 fs/reiserfs/lbalance.c 	     && comp_short_le_keys /*COMP_SHORT_KEYS */ (&ih->ih_key,
ih                 73 fs/reiserfs/lbalance.c 		memcpy(&new_ih.ih_key, &ih->ih_key, KEY_SIZE);
ih                 82 fs/reiserfs/lbalance.c 			if (from < ih_entry_count(ih)) {
ih                141 fs/reiserfs/lbalance.c 	struct item_head *ih;
ih                153 fs/reiserfs/lbalance.c 		ih = item_head(src, 0);
ih                158 fs/reiserfs/lbalance.c 		    || (!op_is_left_mergeable(&ih->ih_key, src->b_size)))
ih                161 fs/reiserfs/lbalance.c 		RFALSE(!ih_item_len(ih),
ih                164 fs/reiserfs/lbalance.c 		if (is_direntry_le_ih(ih)) {
ih                167 fs/reiserfs/lbalance.c 				bytes_or_entries = ih_entry_count(ih);
ih                180 fs/reiserfs/lbalance.c 			bytes_or_entries = ih_item_len(ih);
ih                184 fs/reiserfs/lbalance.c 			if (bytes_or_entries == ih_item_len(ih)
ih                185 fs/reiserfs/lbalance.c 			    && is_indirect_le_ih(ih))
ih                186 fs/reiserfs/lbalance.c 				if (get_ih_free_space(ih))
ih                191 fs/reiserfs/lbalance.c 						       "entirely (%h)", ih);
ih                201 fs/reiserfs/lbalance.c 				     bytes_or_entries, ih_item_body(src, ih), 0);
ih                206 fs/reiserfs/lbalance.c 			       ih);
ih                207 fs/reiserfs/lbalance.c 			if (bytes_or_entries == ih_item_len(ih))
ih                208 fs/reiserfs/lbalance.c 				set_ih_free_space(dih, get_ih_free_space(ih));
ih                221 fs/reiserfs/lbalance.c 	ih = item_head(src, src_nr_item - 1);
ih                227 fs/reiserfs/lbalance.c 	if (is_direntry_le_ih(ih)) {
ih                233 fs/reiserfs/lbalance.c 			bytes_or_entries = ih_entry_count(ih);
ih                237 fs/reiserfs/lbalance.c 				      ih_entry_count(ih) - bytes_or_entries,
ih                249 fs/reiserfs/lbalance.c 	RFALSE(is_indirect_le_ih(ih) && get_ih_free_space(ih),
ih                251 fs/reiserfs/lbalance.c 	       ih);
ih                255 fs/reiserfs/lbalance.c 		bytes_or_entries = ih_item_len(ih);
ih                258 fs/reiserfs/lbalance.c 		       le_ih_k_offset(ih) + op_bytes_number(ih, src->b_size),
ih                259 fs/reiserfs/lbalance.c 		       "vs-10050: items %h and %h do not match", ih, dih);
ih                262 fs/reiserfs/lbalance.c 		set_le_ih_k_offset(dih, le_ih_k_offset(ih));
ih                266 fs/reiserfs/lbalance.c 		set_le_ih_k_type(dih, le_ih_k_type(ih));
ih                269 fs/reiserfs/lbalance.c 		RFALSE(ih_item_len(ih) <= bytes_or_entries,
ih                271 fs/reiserfs/lbalance.c 		       (unsigned long)ih_item_len(ih),
ih                298 fs/reiserfs/lbalance.c 				       ih) + ih_item_len(ih) - bytes_or_entries,
ih                320 fs/reiserfs/lbalance.c 	struct item_head *ih;
ih                348 fs/reiserfs/lbalance.c 	ih = item_head(dest, dest_before);
ih                355 fs/reiserfs/lbalance.c 	memmove(ih + cpy_num, ih, (nr - dest_before) * IH_SIZE);
ih                358 fs/reiserfs/lbalance.c 	memcpy(ih, item_head(src, first), cpy_num * IH_SIZE);
ih                364 fs/reiserfs/lbalance.c 	j = location = (dest_before == 0) ? dest->b_size : ih_location(ih - 1);
ih                366 fs/reiserfs/lbalance.c 		location -= ih_item_len(ih + i - dest_before);
ih                367 fs/reiserfs/lbalance.c 		put_ih_location(ih + i - dest_before, location);
ih                371 fs/reiserfs/lbalance.c 	last_loc = ih_location(&ih[nr + cpy_num - 1 - dest_before]);
ih                372 fs/reiserfs/lbalance.c 	last_inserted_loc = ih_location(&ih[cpy_num - 1]);
ih                419 fs/reiserfs/lbalance.c 	struct item_head *ih;
ih                429 fs/reiserfs/lbalance.c 		ih = item_head(src, item_num);
ih                430 fs/reiserfs/lbalance.c 		if (is_direntry_le_ih(ih))
ih                442 fs/reiserfs/lbalance.c 			memcpy(&n_ih, ih, IH_SIZE);
ih                444 fs/reiserfs/lbalance.c 			if (is_indirect_le_ih(ih)) {
ih                445 fs/reiserfs/lbalance.c 				RFALSE(cpy_bytes == ih_item_len(ih)
ih                446 fs/reiserfs/lbalance.c 				       && get_ih_free_space(ih),
ih                448 fs/reiserfs/lbalance.c 				       (long unsigned)get_ih_free_space(ih));
ih                452 fs/reiserfs/lbalance.c 			RFALSE(op_is_left_mergeable(&ih->ih_key, src->b_size),
ih                453 fs/reiserfs/lbalance.c 			       "vs-10190: bad mergeability of item %h", ih);
ih                454 fs/reiserfs/lbalance.c 			n_ih.ih_version = ih->ih_version;	/* JDM Endian safe, both le */
ih                463 fs/reiserfs/lbalance.c 		ih = item_head(src, item_num);
ih                464 fs/reiserfs/lbalance.c 		if (is_direntry_le_ih(ih))
ih                467 fs/reiserfs/lbalance.c 					      ih_entry_count(ih) - cpy_bytes,
ih                478 fs/reiserfs/lbalance.c 			memcpy(&n_ih.ih_key, &ih->ih_key, KEY_SIZE);
ih                481 fs/reiserfs/lbalance.c 			n_ih.ih_version = ih->ih_version;
ih                483 fs/reiserfs/lbalance.c 			if (is_direct_le_ih(ih)) {
ih                485 fs/reiserfs/lbalance.c 						   le_ih_k_offset(ih) +
ih                486 fs/reiserfs/lbalance.c 						   ih_item_len(ih) - cpy_bytes);
ih                491 fs/reiserfs/lbalance.c 				RFALSE(!cpy_bytes && get_ih_free_space(ih),
ih                494 fs/reiserfs/lbalance.c 						   le_ih_k_offset(ih) +
ih                495 fs/reiserfs/lbalance.c 						   (ih_item_len(ih) -
ih                499 fs/reiserfs/lbalance.c 				set_ih_free_space(&n_ih, get_ih_free_space(ih));
ih                506 fs/reiserfs/lbalance.c 			n_ih.ih_version = ih->ih_version;
ih                510 fs/reiserfs/lbalance.c 						ih_item_len(ih) - cpy_bytes, 0);
ih                868 fs/reiserfs/lbalance.c 			struct item_head *ih;
ih                878 fs/reiserfs/lbalance.c 			ih = item_head(bh, B_NR_ITEMS(bh) - 1);
ih                879 fs/reiserfs/lbalance.c 			if (is_direntry_le_ih(ih))
ih                885 fs/reiserfs/lbalance.c 				len = ih_entry_count(ih);
ih                888 fs/reiserfs/lbalance.c 				len = ih_item_len(ih);
ih                909 fs/reiserfs/lbalance.c 	struct item_head *ih;
ih                927 fs/reiserfs/lbalance.c 	ih = item_head(bh, before);
ih                930 fs/reiserfs/lbalance.c 	last_loc = nr ? ih_location(&ih[nr - before - 1]) : bh->b_size;
ih                931 fs/reiserfs/lbalance.c 	unmoved_loc = before ? ih_location(ih - 1) : bh->b_size;
ih                948 fs/reiserfs/lbalance.c 	memmove(ih + 1, ih, IH_SIZE * (nr - before));
ih                949 fs/reiserfs/lbalance.c 	memmove(ih, inserted_item_ih, IH_SIZE);
ih                953 fs/reiserfs/lbalance.c 		unmoved_loc -= ih_item_len(&ih[i - before]);
ih                954 fs/reiserfs/lbalance.c 		put_ih_location(&ih[i - before], unmoved_loc);
ih                985 fs/reiserfs/lbalance.c 	struct item_head *ih;
ih               1011 fs/reiserfs/lbalance.c 	ih = item_head(bh, affected_item_num);
ih               1013 fs/reiserfs/lbalance.c 	last_loc = ih_location(&ih[nr - affected_item_num - 1]);
ih               1014 fs/reiserfs/lbalance.c 	unmoved_loc = affected_item_num ? ih_location(ih - 1) : bh->b_size;
ih               1022 fs/reiserfs/lbalance.c 		put_ih_location(&ih[i - affected_item_num],
ih               1023 fs/reiserfs/lbalance.c 				ih_location(&ih[i - affected_item_num]) -
ih               1027 fs/reiserfs/lbalance.c 		if (!is_direntry_le_ih(ih)) {
ih               1030 fs/reiserfs/lbalance.c 				memmove(bh->b_data + ih_location(ih) +
ih               1032 fs/reiserfs/lbalance.c 					bh->b_data + ih_location(ih),
ih               1033 fs/reiserfs/lbalance.c 					ih_item_len(ih));
ih               1035 fs/reiserfs/lbalance.c 				memset(bh->b_data + ih_location(ih), 0,
ih               1037 fs/reiserfs/lbalance.c 				memcpy(bh->b_data + ih_location(ih) +
ih               1051 fs/reiserfs/lbalance.c 	put_ih_item_len(ih, ih_item_len(ih) + paste_size);
ih               1073 fs/reiserfs/lbalance.c 			    struct item_head *ih, int from, int del_count)
ih               1086 fs/reiserfs/lbalance.c 	RFALSE(!is_direntry_le_ih(ih), "10180: item is not directory item");
ih               1087 fs/reiserfs/lbalance.c 	RFALSE(ih_entry_count(ih) < from + del_count,
ih               1089 fs/reiserfs/lbalance.c 	       ih_entry_count(ih), from, del_count);
ih               1095 fs/reiserfs/lbalance.c 	item = bh->b_data + ih_location(ih);
ih               1098 fs/reiserfs/lbalance.c 	deh = B_I_DEH(bh, ih);
ih               1105 fs/reiserfs/lbalance.c 	    (from ? deh_location(&deh[from - 1]) : ih_item_len(ih));
ih               1111 fs/reiserfs/lbalance.c 	for (i = ih_entry_count(ih) - 1; i > from + del_count - 1; i--)
ih               1121 fs/reiserfs/lbalance.c 	put_ih_entry_count(ih, ih_entry_count(ih) - del_count);
ih               1131 fs/reiserfs/lbalance.c 		prev_record, item + ih_item_len(ih) - prev_record);
ih               1151 fs/reiserfs/lbalance.c 	struct item_head *ih;
ih               1159 fs/reiserfs/lbalance.c 	ih = item_head(bh, cut_item_num);
ih               1161 fs/reiserfs/lbalance.c 	if (is_direntry_le_ih(ih)) {
ih               1163 fs/reiserfs/lbalance.c 		cut_size = leaf_cut_entries(bh, ih, pos_in_item, cut_size);
ih               1170 fs/reiserfs/lbalance.c 			set_le_ih_k_offset(ih, deh_offset(B_I_DEH(bh, ih)));
ih               1174 fs/reiserfs/lbalance.c 		RFALSE(is_statdata_le_ih(ih), "10195: item is stat data");
ih               1175 fs/reiserfs/lbalance.c 		RFALSE(pos_in_item && pos_in_item + cut_size != ih_item_len(ih),
ih               1178 fs/reiserfs/lbalance.c 		       (long unsigned)ih_item_len(ih));
ih               1182 fs/reiserfs/lbalance.c 			memmove(bh->b_data + ih_location(ih),
ih               1183 fs/reiserfs/lbalance.c 				bh->b_data + ih_location(ih) + cut_size,
ih               1184 fs/reiserfs/lbalance.c 				ih_item_len(ih) - cut_size);
ih               1187 fs/reiserfs/lbalance.c 			if (is_direct_le_ih(ih))
ih               1188 fs/reiserfs/lbalance.c 				set_le_ih_k_offset(ih,
ih               1189 fs/reiserfs/lbalance.c 						   le_ih_k_offset(ih) +
ih               1192 fs/reiserfs/lbalance.c 				set_le_ih_k_offset(ih,
ih               1193 fs/reiserfs/lbalance.c 						   le_ih_k_offset(ih) +
ih               1196 fs/reiserfs/lbalance.c 				RFALSE(ih_item_len(ih) == cut_size
ih               1197 fs/reiserfs/lbalance.c 				       && get_ih_free_space(ih),
ih               1198 fs/reiserfs/lbalance.c 				       "10205: invalid ih_free_space (%h)", ih);
ih               1204 fs/reiserfs/lbalance.c 	last_loc = ih_location(&ih[nr - cut_item_num - 1]);
ih               1207 fs/reiserfs/lbalance.c 	unmoved_loc = cut_item_num ? ih_location(ih - 1) : bh->b_size;
ih               1214 fs/reiserfs/lbalance.c 	put_ih_item_len(ih, ih_item_len(ih) - cut_size);
ih               1216 fs/reiserfs/lbalance.c 	if (is_indirect_le_ih(ih)) {
ih               1218 fs/reiserfs/lbalance.c 			set_ih_free_space(ih, 0);
ih               1223 fs/reiserfs/lbalance.c 		put_ih_location(&ih[i - cut_item_num],
ih               1224 fs/reiserfs/lbalance.c 				ih_location(&ih[i - cut_item_num]) + cut_size);
ih               1248 fs/reiserfs/lbalance.c 	struct item_head *ih;
ih               1271 fs/reiserfs/lbalance.c 	ih = item_head(bh, first);
ih               1274 fs/reiserfs/lbalance.c 	j = (first == 0) ? bh->b_size : ih_location(ih - 1);
ih               1277 fs/reiserfs/lbalance.c 	last_loc = ih_location(&ih[nr - 1 - first]);
ih               1278 fs/reiserfs/lbalance.c 	last_removed_loc = ih_location(&ih[del_num - 1]);
ih               1284 fs/reiserfs/lbalance.c 	memmove(ih, ih + del_num, (nr - first - del_num) * IH_SIZE);
ih               1288 fs/reiserfs/lbalance.c 		put_ih_location(&ih[i - first],
ih               1289 fs/reiserfs/lbalance.c 				ih_location(&ih[i - first]) + (j -
ih               1321 fs/reiserfs/lbalance.c 	struct item_head *ih;
ih               1331 fs/reiserfs/lbalance.c 	ih = item_head(bh, item_num);
ih               1337 fs/reiserfs/lbalance.c 	RFALSE(!is_direntry_le_ih(ih), "10225: item is not directory item");
ih               1338 fs/reiserfs/lbalance.c 	RFALSE(ih_entry_count(ih) < before,
ih               1340 fs/reiserfs/lbalance.c 	       ih_entry_count(ih), before);
ih               1343 fs/reiserfs/lbalance.c 	item = bh->b_data + ih_location(ih);
ih               1346 fs/reiserfs/lbalance.c 	deh = B_I_DEH(bh, ih);
ih               1352 fs/reiserfs/lbalance.c 	     : (ih_item_len(ih) - paste_size));
ih               1355 fs/reiserfs/lbalance.c 	for (i = ih_entry_count(ih) - 1; i >= before; i--)
ih               1365 fs/reiserfs/lbalance.c 	put_ih_entry_count(ih, ih_entry_count(ih) + new_entry_count);
ih               1369 fs/reiserfs/lbalance.c 		item + (ih_item_len(ih) - paste_size) - insert_point);
ih               1396 fs/reiserfs/lbalance.c 		set_le_ih_k_offset(ih, deh_offset(new_dehs));
ih               1402 fs/reiserfs/lbalance.c 		deh = B_I_DEH(bh, ih);
ih               1403 fs/reiserfs/lbalance.c 		for (i = 0; i < ih_entry_count(ih); i++) {
ih               1406 fs/reiserfs/lbalance.c 			     ih_entry_count(ih) -
ih               1415 fs/reiserfs/lbalance.c 					       ih, deh + i - 1, i, deh + i);
ih               1421 fs/reiserfs/lbalance.c 					       ih, i, deh + i, deh + i + 1);
ih                 31 fs/reiserfs/namei.c 	struct item_head *ih = de->de_ih;
ih                 36 fs/reiserfs/namei.c 	rbound = ih_entry_count(ih) - 1;
ih                116 fs/reiserfs/prints.c static int scnprintf_item_head(char *buf, size_t size, struct item_head *ih)
ih                118 fs/reiserfs/prints.c 	if (ih) {
ih                123 fs/reiserfs/prints.c 			       (ih_version(ih) == KEY_FORMAT_3_6) ?
ih                126 fs/reiserfs/prints.c 		p += scnprintf_le_key(p, end - p, &ih->ih_key);
ih                130 fs/reiserfs/prints.c 			       ih_item_len(ih), ih_location(ih),
ih                131 fs/reiserfs/prints.c 			       ih_free_space(ih));
ih                481 fs/reiserfs/prints.c 	struct item_head *ih;
ih                491 fs/reiserfs/prints.c 	ih = item_head(bh, 0);
ih                500 fs/reiserfs/prints.c 				&(ih->ih_key), &((ih + nr - 1)->ih_key));
ih                514 fs/reiserfs/prints.c 	ih += from;
ih                519 fs/reiserfs/prints.c 	for (i = from; i < to; i++, ih++) {
ih                522 fs/reiserfs/prints.c 		reiserfs_printk("|%2d| %h |\n", i, ih);
ih                524 fs/reiserfs/prints.c 			op_print_item(ih, ih_item_body(bh, ih));
ih                765 fs/reiserfs/prints.c 	struct item_head *ih;
ih                770 fs/reiserfs/prints.c 	for (i = 0, ih = item_head(bh, 0); i < B_NR_ITEMS(bh); i++, ih++)
ih                771 fs/reiserfs/prints.c 		op_check_item(ih, ih_item_body(bh, ih));
ih               1398 fs/reiserfs/reiserfs.h #define ih_free_space(ih)            le16_to_cpu((ih)->u.ih_free_space_reserved)
ih               1399 fs/reiserfs/reiserfs.h #define ih_version(ih)               le16_to_cpu((ih)->ih_version)
ih               1400 fs/reiserfs/reiserfs.h #define ih_entry_count(ih)           le16_to_cpu((ih)->u.ih_entry_count)
ih               1401 fs/reiserfs/reiserfs.h #define ih_location(ih)              le16_to_cpu((ih)->ih_item_location)
ih               1402 fs/reiserfs/reiserfs.h #define ih_item_len(ih)              le16_to_cpu((ih)->ih_item_len)
ih               1404 fs/reiserfs/reiserfs.h #define put_ih_free_space(ih, val)   do { (ih)->u.ih_free_space_reserved = cpu_to_le16(val); } while(0)
ih               1405 fs/reiserfs/reiserfs.h #define put_ih_version(ih, val)      do { (ih)->ih_version = cpu_to_le16(val); } while (0)
ih               1406 fs/reiserfs/reiserfs.h #define put_ih_entry_count(ih, val)  do { (ih)->u.ih_entry_count = cpu_to_le16(val); } while (0)
ih               1407 fs/reiserfs/reiserfs.h #define put_ih_location(ih, val)     do { (ih)->ih_item_location = cpu_to_le16(val); } while (0)
ih               1408 fs/reiserfs/reiserfs.h #define put_ih_item_len(ih, val)     do { (ih)->ih_item_len = cpu_to_le16(val); } while (0)
ih               1410 fs/reiserfs/reiserfs.h #define unreachable_item(ih) (ih_version(ih) & (1 << 15))
ih               1412 fs/reiserfs/reiserfs.h #define get_ih_free_space(ih) (ih_version (ih) == KEY_FORMAT_3_6 ? 0 : ih_free_space (ih))
ih               1413 fs/reiserfs/reiserfs.h #define set_ih_free_space(ih,val) put_ih_free_space((ih), ((ih_version(ih) == KEY_FORMAT_3_6) ? 0 : (val)))
ih               1482 fs/reiserfs/reiserfs.h static inline loff_t le_ih_k_offset(const struct item_head *ih)
ih               1484 fs/reiserfs/reiserfs.h 	return le_key_k_offset(ih_version(ih), &(ih->ih_key));
ih               1496 fs/reiserfs/reiserfs.h static inline loff_t le_ih_k_type(const struct item_head *ih)
ih               1498 fs/reiserfs/reiserfs.h 	return le_key_k_type(ih_version(ih), &(ih->ih_key));
ih               1517 fs/reiserfs/reiserfs.h static inline void add_le_ih_k_offset(struct item_head *ih, loff_t offset)
ih               1519 fs/reiserfs/reiserfs.h 	add_le_key_k_offset(ih_version(ih), &(ih->ih_key), offset);
ih               1522 fs/reiserfs/reiserfs.h static inline void set_le_ih_k_offset(struct item_head *ih, loff_t offset)
ih               1524 fs/reiserfs/reiserfs.h 	set_le_key_k_offset(ih_version(ih), &(ih->ih_key), offset);
ih               1537 fs/reiserfs/reiserfs.h static inline void set_le_ih_k_type(struct item_head *ih, int type)
ih               1539 fs/reiserfs/reiserfs.h 	set_le_key_k_type(ih_version(ih), &(ih->ih_key), type);
ih               1563 fs/reiserfs/reiserfs.h static inline int is_direntry_le_ih(struct item_head *ih)
ih               1565 fs/reiserfs/reiserfs.h 	return is_direntry_le_key(ih_version(ih), &ih->ih_key);
ih               1568 fs/reiserfs/reiserfs.h static inline int is_direct_le_ih(struct item_head *ih)
ih               1570 fs/reiserfs/reiserfs.h 	return is_direct_le_key(ih_version(ih), &ih->ih_key);
ih               1573 fs/reiserfs/reiserfs.h static inline int is_indirect_le_ih(struct item_head *ih)
ih               1575 fs/reiserfs/reiserfs.h 	return is_indirect_le_key(ih_version(ih), &ih->ih_key);
ih               1578 fs/reiserfs/reiserfs.h static inline int is_statdata_le_ih(struct item_head *ih)
ih               1580 fs/reiserfs/reiserfs.h 	return is_statdata_le_key(ih_version(ih), &ih->ih_key);
ih               1615 fs/reiserfs/reiserfs.h #define is_direntry_cpu_ih(ih) (is_direntry_cpu_key (&((ih)->ih_key)))
ih               1616 fs/reiserfs/reiserfs.h #define is_direct_cpu_ih(ih) (is_direct_cpu_key (&((ih)->ih_key)))
ih               1617 fs/reiserfs/reiserfs.h #define is_indirect_cpu_ih(ih) (is_indirect_cpu_key (&((ih)->ih_key)))
ih               1618 fs/reiserfs/reiserfs.h #define is_statdata_cpu_ih(ih) (is_statdata_cpu_key (&((ih)->ih_key)))
ih               1620 fs/reiserfs/reiserfs.h #define I_K_KEY_IN_ITEM(ih, key, n_blocksize) \
ih               1621 fs/reiserfs/reiserfs.h     (!COMP_SHORT_KEYS(ih, key) && \
ih               1622 fs/reiserfs/reiserfs.h 	  I_OFF_BYTE_IN_ITEM(ih, k_offset(key), n_blocksize))
ih               1742 fs/reiserfs/reiserfs.h #define stat_data_v1(ih)        (ih_version (ih) == KEY_FORMAT_3_5)
ih               1822 fs/reiserfs/reiserfs.h #define stat_data_v2(ih)        (ih_version (ih) == KEY_FORMAT_3_6)
ih               2006 fs/reiserfs/reiserfs.h #define B_I_DEH_ENTRY_FILE_NAME(bh, ih, deh) \
ih               2007 fs/reiserfs/reiserfs.h 				(ih_item_body(bh, ih) + deh_location(deh))
ih               2010 fs/reiserfs/reiserfs.h #define I_DEH_N_ENTRY_FILE_NAME_LENGTH(ih,deh,entry_num) \
ih               2011 fs/reiserfs/reiserfs.h (I_DEH_N_ENTRY_LENGTH (ih, deh, entry_num) - (de_with_sd (deh) ? SD_SIZE : 0))
ih               2202 fs/reiserfs/reiserfs.h 	struct item_head *ih = reiserfs_node_data(bh);
ih               2204 fs/reiserfs/reiserfs.h 	return &ih[item_num];
ih               2215 fs/reiserfs/reiserfs.h 				 const struct item_head *ih)
ih               2217 fs/reiserfs/reiserfs.h 	return bh->b_data + ih_location(ih);
ih               2238 fs/reiserfs/reiserfs.h #define item_moved(ih,path) comp_items(ih, path)
ih               2239 fs/reiserfs/reiserfs.h #define path_changed(ih,path) comp_items (ih, path)
ih               2243 fs/reiserfs/reiserfs.h #define B_I_DEH(bh, ih) ((struct reiserfs_de_head *)(ih_item_body(bh, ih)))
ih               2254 fs/reiserfs/reiserfs.h 			       const struct item_head *ih, int pos_in_item)
ih               2258 fs/reiserfs/reiserfs.h 	deh = B_I_DEH(bh, ih) + pos_in_item;
ih               2262 fs/reiserfs/reiserfs.h 	return ih_item_len(ih) - deh_location(deh);
ih               2629 fs/reiserfs/reiserfs.h 	int (*bytes_number) (struct item_head * ih, int block_size);
ih               2631 fs/reiserfs/reiserfs.h 	int (*is_left_mergeable) (struct reiserfs_key * ih,
ih               2648 fs/reiserfs/reiserfs.h #define op_bytes_number(ih,bsize)                    item_ops[le_ih_k_type (ih)]->bytes_number (ih, bsize)
ih               2650 fs/reiserfs/reiserfs.h #define op_print_item(ih,item)                       item_ops[le_ih_k_type (ih)]->print_item (ih, item)
ih               2651 fs/reiserfs/reiserfs.h #define op_check_item(ih,item)                       item_ops[le_ih_k_type (ih)]->check_item (ih, item)
ih               2662 fs/reiserfs/reiserfs.h #define I_UNFM_NUM(ih)	(ih_item_len(ih) / UNFM_P_SIZE)
ih               2668 fs/reiserfs/reiserfs.h #define I_POS_UNFM_SIZE(ih,pos,size) (((pos) == I_UNFM_NUM(ih) - 1 ) ? (size) - ih_free_space(ih) : (size))
ih               2678 fs/reiserfs/reiserfs.h #define B_I_STAT_DATA(bh, ih) ( (struct stat_data * )((bh)->b_data + ih_location(ih)) )
ih               2688 fs/reiserfs/reiserfs.h #define B_I_POS_UNFM_POINTER(bh, ih, pos)				\
ih               2689 fs/reiserfs/reiserfs.h 	le32_to_cpu(*(((unp_t *)ih_item_body(bh, ih)) + (pos)))
ih               2690 fs/reiserfs/reiserfs.h #define PUT_B_I_POS_UNFM_POINTER(bh, ih, pos, val)			\
ih               2691 fs/reiserfs/reiserfs.h 	(*(((unp_t *)ih_item_body(bh, ih)) + (pos)) = cpu_to_le32(val))
ih               3019 fs/reiserfs/reiserfs.h 			 struct item_head *ih,
ih               3083 fs/reiserfs/reiserfs.h void make_le_item_head(struct item_head *ih, const struct cpu_key *key,
ih               3248 fs/reiserfs/reiserfs.h void do_balance(struct tree_balance *tb, struct item_head *ih,
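
The fs/reiserfs/reiserfs.h hits above are the accessor layer that every other reiserfs use of ih in this listing goes through: each macro or inline decodes (or encodes) one little-endian field of struct item_head. A minimal sketch of how the length accessors pair up, assuming it is built inside fs/reiserfs with "reiserfs.h" included; the helper name is illustrative and not taken from the tree:

	/* illustrative only: read the LE16 item length, adjust it, write it back */
	static void example_grow_item_len(struct item_head *ih, int extra)
	{
		put_ih_item_len(ih, ih_item_len(ih) + extra);
	}

This is the same read-modify-write shape the do_balan.c and lbalance.c hits earlier in the listing use, e.g. put_ih_item_len(ih, ih_item_len(ih) + paste_size).
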
ih                393 fs/reiserfs/stree.c 	struct item_head *ih;
ih                413 fs/reiserfs/stree.c 	ih = (struct item_head *)(buf + BLKH_SIZE) + nr - 1;
ih                414 fs/reiserfs/stree.c 	used_space = BLKH_SIZE + IH_SIZE * nr + (blocksize - ih_location(ih));
ih                428 fs/reiserfs/stree.c 	ih = (struct item_head *)(buf + BLKH_SIZE);
ih                430 fs/reiserfs/stree.c 	for (i = 0; i < nr; i++, ih++) {
ih                431 fs/reiserfs/stree.c 		if (le_ih_k_type(ih) == TYPE_ANY) {
ih                434 fs/reiserfs/stree.c 					 ih);
ih                437 fs/reiserfs/stree.c 		if (ih_location(ih) >= blocksize
ih                438 fs/reiserfs/stree.c 		    || ih_location(ih) < IH_SIZE * nr) {
ih                441 fs/reiserfs/stree.c 					 ih);
ih                444 fs/reiserfs/stree.c 		if (ih_item_len(ih) < 1
ih                445 fs/reiserfs/stree.c 		    || ih_item_len(ih) > MAX_ITEM_LEN(blocksize)) {
ih                448 fs/reiserfs/stree.c 					 ih);
ih                451 fs/reiserfs/stree.c 		if (prev_location - ih_location(ih) != ih_item_len(ih)) {
ih                454 fs/reiserfs/stree.c 					 "(second one): %h", ih);
ih                457 fs/reiserfs/stree.c 		prev_location = ih_location(ih);
ih                906 fs/reiserfs/stree.c 	struct item_head *ih;
ih                917 fs/reiserfs/stree.c 	ih = tp_item_head(path);
ih                918 fs/reiserfs/stree.c 	return memcmp(stored_ih, ih, IH_SIZE);
ih               1195 fs/reiserfs/stree.c char key2type(struct reiserfs_key *ih)
ih               1197 fs/reiserfs/stree.c 	if (is_direntry_le_key(2, ih))
ih               1199 fs/reiserfs/stree.c 	if (is_direct_le_key(2, ih))
ih               1201 fs/reiserfs/stree.c 	if (is_indirect_le_key(2, ih))
ih               1203 fs/reiserfs/stree.c 	if (is_statdata_le_key(2, ih))
ih               1208 fs/reiserfs/stree.c char head2type(struct item_head *ih)
ih               1210 fs/reiserfs/stree.c 	if (is_direntry_le_ih(ih))
ih               1212 fs/reiserfs/stree.c 	if (is_direct_le_ih(ih))
ih               1214 fs/reiserfs/stree.c 	if (is_indirect_le_ih(ih))
ih               1216 fs/reiserfs/stree.c 	if (is_statdata_le_ih(ih))
ih               2166 fs/reiserfs/stree.c 			 struct item_head *ih, struct inode *inode,
ih               2179 fs/reiserfs/stree.c 		quota_bytes = ih_item_len(ih);
ih               2186 fs/reiserfs/stree.c 		if (!S_ISLNK(inode->i_mode) && is_direct_le_ih(ih))
ih               2191 fs/reiserfs/stree.c 			       quota_bytes, inode->i_uid, head2type(ih));
ih               2206 fs/reiserfs/stree.c 		       IH_SIZE + ih_item_len(ih));
ih               2219 fs/reiserfs/stree.c 		fix_nodes(M_INSERT, &s_ins_balance, ih,
ih               2240 fs/reiserfs/stree.c 		do_balance(&s_ins_balance, ih, body, M_INSERT);
ih               2252 fs/reiserfs/stree.c 		       quota_bytes, inode->i_uid, head2type(ih));
ih                231 fs/reiserfs/super.c 	struct item_head *ih;
ih                301 fs/reiserfs/super.c 		ih = item_head(bh, item_pos);
ih                303 fs/reiserfs/super.c 		if (le32_to_cpu(ih->ih_key.k_dir_id) != MAX_KEY_OBJECTID)
ih                307 fs/reiserfs/super.c 		save_link_key = ih->ih_key;
ih                308 fs/reiserfs/super.c 		if (is_indirect_le_ih(ih))
ih                314 fs/reiserfs/super.c 		item = ih_item_body(bh, ih);
ih                317 fs/reiserfs/super.c 		    le32_to_cpu(ih->ih_key.k_objectid);
ih                435 fs/reiserfs/super.c 	struct item_head ih;
ih                460 fs/reiserfs/super.c 		make_le_item_head(&ih, &key, key.version,
ih                474 fs/reiserfs/super.c 		make_le_item_head(&ih, &key, key.version, 1, TYPE_INDIRECT,
ih                495 fs/reiserfs/super.c 	    reiserfs_insert_item(th, &path, &key, &ih, NULL, (char *)&link);
ih                330 include/linux/intel-iommu.h #define QI_IOTLB_IH(ih)		(((u64)ih) << 6)
ih                354 include/linux/intel-iommu.h #define QI_EIOTLB_IH(ih)	(((u64)ih) << 6)
ih                322 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                336 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                347 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                358 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                369 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                386 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                398 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                409 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                425 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                440 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                457 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                468 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                481 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                494 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                509 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                520 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                533 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                546 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                556 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                609 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                625 include/uapi/linux/coda.h     struct coda_in_hdr ih;
ih                653 include/uapi/linux/coda.h 	struct coda_in_hdr ih;
ih                672 include/uapi/linux/coda.h     struct coda_in_hdr ih;		/* NB: every struct below begins with an ih */
ih                223 net/bridge/br_multicast.c 	struct igmphdr *ih;
ih                227 net/bridge/br_multicast.c 	igmp_hdr_size = sizeof(*ih);
ih                276 net/bridge/br_multicast.c 		ih = igmp_hdr(skb);
ih                277 net/bridge/br_multicast.c 		ih->type = IGMP_HOST_MEMBERSHIP_QUERY;
ih                278 net/bridge/br_multicast.c 		ih->code = (group ? br->multicast_last_member_interval :
ih                281 net/bridge/br_multicast.c 		ih->group = group;
ih                282 net/bridge/br_multicast.c 		ih->csum = 0;
ih                283 net/bridge/br_multicast.c 		ih->csum = ip_compute_csum((void *)ih, sizeof(*ih));
ih                922 net/bridge/br_multicast.c 	struct igmpv3_report *ih;
ih                932 net/bridge/br_multicast.c 	ih = igmpv3_report_hdr(skb);
ih                933 net/bridge/br_multicast.c 	num = ntohs(ih->ngrec);
ih                934 net/bridge/br_multicast.c 	len = skb_transport_offset(skb) + sizeof(*ih);
ih               1222 net/bridge/br_multicast.c 	struct igmphdr *ih = igmp_hdr(skb);
ih               1237 net/bridge/br_multicast.c 	group = ih->group;
ih               1239 net/bridge/br_multicast.c 	if (transport_len == sizeof(*ih)) {
ih               1240 net/bridge/br_multicast.c 		max_delay = ih->code * (HZ / IGMP_TIMER_SCALE);
ih               1601 net/bridge/br_multicast.c 	struct igmphdr *ih;
ih               1622 net/bridge/br_multicast.c 	ih = igmp_hdr(skb);
ih               1624 net/bridge/br_multicast.c 	BR_INPUT_SKB_CB(skb)->igmp = ih->type;
ih               1626 net/bridge/br_multicast.c 	switch (ih->type) {
ih               1630 net/bridge/br_multicast.c 		err = br_ip4_multicast_add_group(br, port, ih->group, vid, src);
ih               1639 net/bridge/br_multicast.c 		br_ip4_multicast_leave_group(br, port, ih->group, vid, src);
ih               2338 net/bridge/br_multicast.c 				struct igmphdr *ih, _ihdr;
ih               2340 net/bridge/br_multicast.c 				ih = skb_header_pointer(skb, offset,
ih               2342 net/bridge/br_multicast.c 				if (!ih)
ih               2344 net/bridge/br_multicast.c 				if (!ih->code)
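
The net/bridge/br_multicast.c hits above show the usual sequence for emitting an IGMP query header: fill in type, code and group, zero the checksum, then checksum the whole header. A hedged sketch of just that final step, assuming a kernel context where linux/igmp.h and the generic checksum helpers are available; the wrapper name is illustrative:

	/* illustrative only: csum must be zero while the sum is being computed */
	static void example_finish_igmp_hdr(struct igmphdr *ih)
	{
		ih->csum = 0;
		ih->csum = ip_compute_csum((void *)ih, sizeof(*ih));
	}
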
ih                 74 net/bridge/netfilter/ebt_among.c 		const struct iphdr *ih;
ih                 77 net/bridge/netfilter/ebt_among.c 		ih = skb_header_pointer(skb, 0, sizeof(_iph), &_iph);
ih                 78 net/bridge/netfilter/ebt_among.c 		if (ih == NULL)
ih                 80 net/bridge/netfilter/ebt_among.c 		*addr = ih->daddr;
ih                105 net/bridge/netfilter/ebt_among.c 		const struct iphdr *ih;
ih                108 net/bridge/netfilter/ebt_among.c 		ih = skb_header_pointer(skb, 0, sizeof(_iph), &_iph);
ih                109 net/bridge/netfilter/ebt_among.c 		if (ih == NULL)
ih                111 net/bridge/netfilter/ebt_among.c 		*addr = ih->saddr;
ih                 41 net/bridge/netfilter/ebt_ip.c 	const struct iphdr *ih;
ih                 46 net/bridge/netfilter/ebt_ip.c 	ih = skb_header_pointer(skb, 0, sizeof(_iph), &_iph);
ih                 47 net/bridge/netfilter/ebt_ip.c 	if (ih == NULL)
ih                 50 net/bridge/netfilter/ebt_ip.c 	    NF_INVF(info, EBT_IP_TOS, info->tos != ih->tos))
ih                 54 net/bridge/netfilter/ebt_ip.c 		    (ih->saddr & info->smsk) != info->saddr))
ih                 58 net/bridge/netfilter/ebt_ip.c 		    (ih->daddr & info->dmsk) != info->daddr))
ih                 61 net/bridge/netfilter/ebt_ip.c 		if (NF_INVF(info, EBT_IP_PROTO, info->protocol != ih->protocol))
ih                 66 net/bridge/netfilter/ebt_ip.c 		if (ntohs(ih->frag_off) & IP_OFFSET)
ih                 70 net/bridge/netfilter/ebt_ip.c 		pptr = skb_header_pointer(skb, ih->ihl*4,
ih                 99 net/bridge/netfilter/ebt_log.c 		const struct iphdr *ih;
ih                102 net/bridge/netfilter/ebt_log.c 		ih = skb_header_pointer(skb, 0, sizeof(_iph), &_iph);
ih                103 net/bridge/netfilter/ebt_log.c 		if (ih == NULL) {
ih                108 net/bridge/netfilter/ebt_log.c 			&ih->saddr, &ih->daddr, ih->tos, ih->protocol);
ih                109 net/bridge/netfilter/ebt_log.c 		print_ports(skb, ih->protocol, ih->ihl*4);
ih                116 net/bridge/netfilter/ebt_log.c 		const struct ipv6hdr *ih;
ih                122 net/bridge/netfilter/ebt_log.c 		ih = skb_header_pointer(skb, 0, sizeof(_iph), &_iph);
ih                123 net/bridge/netfilter/ebt_log.c 		if (ih == NULL) {
ih                128 net/bridge/netfilter/ebt_log.c 			&ih->saddr, &ih->daddr, ih->priority, ih->nexthdr);
ih                129 net/bridge/netfilter/ebt_log.c 		nexthdr = ih->nexthdr;
ih                729 net/ipv4/igmp.c 	struct igmphdr *ih;
ih                786 net/ipv4/igmp.c 	ih = skb_put(skb, sizeof(struct igmphdr));
ih                787 net/ipv4/igmp.c 	ih->type = type;
ih                788 net/ipv4/igmp.c 	ih->code = 0;
ih                789 net/ipv4/igmp.c 	ih->csum = 0;
ih                790 net/ipv4/igmp.c 	ih->group = group;
ih                791 net/ipv4/igmp.c 	ih->csum = ip_compute_csum((void *)ih, sizeof(struct igmphdr));
ih                936 net/ipv4/igmp.c 	struct igmphdr 		*ih = igmp_hdr(skb);
ih                939 net/ipv4/igmp.c 	__be32			group = ih->group;
ih                946 net/ipv4/igmp.c 		if (ih->code == 0) {
ih                956 net/ipv4/igmp.c 			max_delay = ih->code*(HZ/IGMP_TIMER_SCALE);
ih               1064 net/ipv4/igmp.c 	struct igmphdr *ih;
ih               1086 net/ipv4/igmp.c 	ih = igmp_hdr(skb);
ih               1087 net/ipv4/igmp.c 	switch (ih->type) {
ih               1099 net/ipv4/igmp.c 			dropped = igmp_heard_report(in_dev, ih->group);
ih                 40 net/ipv4/netfilter/nf_log_ipv4.c 	const struct iphdr *ih;
ih                 48 net/ipv4/netfilter/nf_log_ipv4.c 	ih = skb_header_pointer(skb, iphoff, sizeof(_iph), &_iph);
ih                 49 net/ipv4/netfilter/nf_log_ipv4.c 	if (ih == NULL) {
ih                 57 net/ipv4/netfilter/nf_log_ipv4.c 	nf_log_buf_add(m, "SRC=%pI4 DST=%pI4 ", &ih->saddr, &ih->daddr);
ih                 61 net/ipv4/netfilter/nf_log_ipv4.c 		       ntohs(ih->tot_len), ih->tos & IPTOS_TOS_MASK,
ih                 62 net/ipv4/netfilter/nf_log_ipv4.c 		       ih->tos & IPTOS_PREC_MASK, ih->ttl, ntohs(ih->id));
ih                 65 net/ipv4/netfilter/nf_log_ipv4.c 	if (ntohs(ih->frag_off) & IP_CE)
ih                 67 net/ipv4/netfilter/nf_log_ipv4.c 	if (ntohs(ih->frag_off) & IP_DF)
ih                 69 net/ipv4/netfilter/nf_log_ipv4.c 	if (ntohs(ih->frag_off) & IP_MF)
ih                 73 net/ipv4/netfilter/nf_log_ipv4.c 	if (ntohs(ih->frag_off) & IP_OFFSET)
ih                 74 net/ipv4/netfilter/nf_log_ipv4.c 		nf_log_buf_add(m, "FRAG:%u ", ntohs(ih->frag_off) & IP_OFFSET);
ih                 77 net/ipv4/netfilter/nf_log_ipv4.c 	    ih->ihl * 4 > sizeof(struct iphdr)) {
ih                 82 net/ipv4/netfilter/nf_log_ipv4.c 		optsize = ih->ihl * 4 - sizeof(struct iphdr);
ih                 97 net/ipv4/netfilter/nf_log_ipv4.c 	switch (ih->protocol) {
ih                 99 net/ipv4/netfilter/nf_log_ipv4.c 		if (nf_log_dump_tcp_header(m, skb, ih->protocol,
ih                100 net/ipv4/netfilter/nf_log_ipv4.c 					   ntohs(ih->frag_off) & IP_OFFSET,
ih                101 net/ipv4/netfilter/nf_log_ipv4.c 					   iphoff+ih->ihl*4, logflags))
ih                106 net/ipv4/netfilter/nf_log_ipv4.c 		if (nf_log_dump_udp_header(m, skb, ih->protocol,
ih                107 net/ipv4/netfilter/nf_log_ipv4.c 					   ntohs(ih->frag_off) & IP_OFFSET,
ih                108 net/ipv4/netfilter/nf_log_ipv4.c 					   iphoff+ih->ihl*4))
ih                135 net/ipv4/netfilter/nf_log_ipv4.c 		if (ntohs(ih->frag_off) & IP_OFFSET)
ih                139 net/ipv4/netfilter/nf_log_ipv4.c 		ich = skb_header_pointer(skb, iphoff + ih->ihl * 4,
ih                143 net/ipv4/netfilter/nf_log_ipv4.c 				       skb->len - iphoff - ih->ihl*4);
ih                153 net/ipv4/netfilter/nf_log_ipv4.c 		    skb->len-iphoff-ih->ihl*4 < required_len[ich->type]) {
ih                155 net/ipv4/netfilter/nf_log_ipv4.c 				       skb->len - iphoff - ih->ihl*4);
ih                184 net/ipv4/netfilter/nf_log_ipv4.c 					    iphoff + ih->ihl*4+sizeof(_icmph));
ih                202 net/ipv4/netfilter/nf_log_ipv4.c 		if (ntohs(ih->frag_off) & IP_OFFSET)
ih                209 net/ipv4/netfilter/nf_log_ipv4.c 		ah = skb_header_pointer(skb, iphoff+ih->ihl*4,
ih                213 net/ipv4/netfilter/nf_log_ipv4.c 				       skb->len - iphoff - ih->ihl*4);
ih                228 net/ipv4/netfilter/nf_log_ipv4.c 		if (ntohs(ih->frag_off) & IP_OFFSET)
ih                232 net/ipv4/netfilter/nf_log_ipv4.c 		eh = skb_header_pointer(skb, iphoff+ih->ihl*4,
ih                236 net/ipv4/netfilter/nf_log_ipv4.c 				       skb->len - iphoff - ih->ihl*4);
ih                246 net/ipv4/netfilter/nf_log_ipv4.c 		nf_log_buf_add(m, "PROTO=%u ", ih->protocol);
ih                109 net/ipv6/ila/ila_common.c 			struct icmp6hdr *ih = (struct icmp6hdr *)
ih                113 net/ipv6/ila/ila_common.c 			inet_proto_csum_replace_by_diff(&ih->icmp6_cksum, skb,
ih                 44 net/ipv6/netfilter/nf_log_ipv6.c 	const struct ipv6hdr *ih;
ih                 54 net/ipv6/netfilter/nf_log_ipv6.c 	ih = skb_header_pointer(skb, ip6hoff, sizeof(_ip6h), &_ip6h);
ih                 55 net/ipv6/netfilter/nf_log_ipv6.c 	if (ih == NULL) {
ih                 61 net/ipv6/netfilter/nf_log_ipv6.c 	nf_log_buf_add(m, "SRC=%pI6 DST=%pI6 ", &ih->saddr, &ih->daddr);
ih                 65 net/ipv6/netfilter/nf_log_ipv6.c 	       ntohs(ih->payload_len) + sizeof(struct ipv6hdr),
ih                 66 net/ipv6/netfilter/nf_log_ipv6.c 	       (ntohl(*(__be32 *)ih) & 0x0ff00000) >> 20,
ih                 67 net/ipv6/netfilter/nf_log_ipv6.c 	       ih->hop_limit,
ih                 68 net/ipv6/netfilter/nf_log_ipv6.c 	       (ntohl(*(__be32 *)ih) & 0x000fffff));
ih                 72 net/ipv6/netfilter/nf_log_ipv6.c 	currenthdr = ih->nexthdr;
ih                221 net/netfilter/ipvs/ip_vs_proto.c 	struct iphdr _iph, *ih;
ih                223 net/netfilter/ipvs/ip_vs_proto.c 	ih = skb_header_pointer(skb, offset, sizeof(_iph), &_iph);
ih                224 net/netfilter/ipvs/ip_vs_proto.c 	if (ih == NULL)
ih                226 net/netfilter/ipvs/ip_vs_proto.c 	else if (ih->frag_off & htons(IP_OFFSET))
ih                227 net/netfilter/ipvs/ip_vs_proto.c 		sprintf(buf, "%pI4->%pI4 frag", &ih->saddr, &ih->daddr);
ih                231 net/netfilter/ipvs/ip_vs_proto.c 		pptr = skb_header_pointer(skb, offset + ih->ihl*4,
ih                235 net/netfilter/ipvs/ip_vs_proto.c 				&ih->saddr, &ih->daddr);
ih                238 net/netfilter/ipvs/ip_vs_proto.c 				&ih->saddr, ntohs(pptr[0]),
ih                239 net/netfilter/ipvs/ip_vs_proto.c 				&ih->daddr, ntohs(pptr[1]));
ih                253 net/netfilter/ipvs/ip_vs_proto.c 	struct ipv6hdr _iph, *ih;
ih                255 net/netfilter/ipvs/ip_vs_proto.c 	ih = skb_header_pointer(skb, offset, sizeof(_iph), &_iph);
ih                256 net/netfilter/ipvs/ip_vs_proto.c 	if (ih == NULL)
ih                258 net/netfilter/ipvs/ip_vs_proto.c 	else if (ih->nexthdr == IPPROTO_FRAGMENT)
ih                259 net/netfilter/ipvs/ip_vs_proto.c 		sprintf(buf, "%pI6c->%pI6c frag", &ih->saddr, &ih->daddr);
ih                267 net/netfilter/ipvs/ip_vs_proto.c 				&ih->saddr, &ih->daddr);
ih                270 net/netfilter/ipvs/ip_vs_proto.c 				&ih->saddr, ntohs(pptr[0]),
ih                271 net/netfilter/ipvs/ip_vs_proto.c 				&ih->daddr, ntohs(pptr[1]));
ih                293 net/netfilter/nf_conntrack_proto_sctp.c 			struct sctp_inithdr _inithdr, *ih;
ih                298 net/netfilter/nf_conntrack_proto_sctp.c 			ih = skb_header_pointer(skb, offset + sizeof(_sch),
ih                300 net/netfilter/nf_conntrack_proto_sctp.c 			if (!ih)
ih                304 net/netfilter/nf_conntrack_proto_sctp.c 				 ih->init_tag);
ih                306 net/netfilter/nf_conntrack_proto_sctp.c 			ct->proto.sctp.vtag[IP_CT_DIR_REPLY] = ih->init_tag;
ih                456 net/netfilter/nf_conntrack_proto_sctp.c 			struct sctp_inithdr _inithdr, *ih;
ih                458 net/netfilter/nf_conntrack_proto_sctp.c 			ih = skb_header_pointer(skb, offset + sizeof(_sch),
ih                460 net/netfilter/nf_conntrack_proto_sctp.c 			if (ih == NULL)
ih                463 net/netfilter/nf_conntrack_proto_sctp.c 				 ih->init_tag, !dir);
ih                464 net/netfilter/nf_conntrack_proto_sctp.c 			ct->proto.sctp.vtag[!dir] = ih->init_tag;
ih                 32 net/netfilter/nft_log.c 	const struct iphdr *ih;
ih                 34 net/netfilter/nft_log.c 	ih = skb_header_pointer(skb, skb_network_offset(skb), sizeof(_iph), &_iph);
ih                 35 net/netfilter/nft_log.c 	if (!ih)
ih                 39 net/netfilter/nft_log.c 			 &ih->saddr, &ih->daddr, ih->protocol);
ih                 47 net/netfilter/nft_log.c 	const struct ipv6hdr *ih;
ih                 51 net/netfilter/nft_log.c 	ih = skb_header_pointer(skb, skb_network_offset(skb), sizeof(_ip6h), &_ip6h);
ih                 52 net/netfilter/nft_log.c 	if (!ih)
ih                 55 net/netfilter/nft_log.c 	nexthdr = ih->nexthdr;
ih                 59 net/netfilter/nft_log.c 			 &ih->saddr, &ih->daddr, nexthdr);
ih                 34 net/netfilter/xt_AUDIT.c 	const struct iphdr *ih;
ih                 36 net/netfilter/xt_AUDIT.c 	ih = skb_header_pointer(skb, skb_network_offset(skb), sizeof(_iph), &_iph);
ih                 37 net/netfilter/xt_AUDIT.c 	if (!ih)
ih                 41 net/netfilter/xt_AUDIT.c 			 &ih->saddr, &ih->daddr, ih->protocol);
ih                 49 net/netfilter/xt_AUDIT.c 	const struct ipv6hdr *ih;
ih                 53 net/netfilter/xt_AUDIT.c 	ih = skb_header_pointer(skb, skb_network_offset(skb), sizeof(_ip6h), &_ip6h);
ih                 54 net/netfilter/xt_AUDIT.c 	if (!ih)
ih                 57 net/netfilter/xt_AUDIT.c 	nexthdr = ih->nexthdr;
ih                 61 net/netfilter/xt_AUDIT.c 			 &ih->saddr, &ih->daddr, nexthdr);
ih                 43 security/lsm_audit.c 	struct iphdr *ih;
ih                 45 security/lsm_audit.c 	ih = ip_hdr(skb);
ih                 46 security/lsm_audit.c 	if (ih == NULL)
ih                 49 security/lsm_audit.c 	ad->u.net->v4info.saddr = ih->saddr;
ih                 50 security/lsm_audit.c 	ad->u.net->v4info.daddr = ih->daddr;
ih                 53 security/lsm_audit.c 		*proto = ih->protocol;
ih                 55 security/lsm_audit.c 	if (ntohs(ih->frag_off) & IP_OFFSET)
ih                 58 security/lsm_audit.c 	switch (ih->protocol) {
ih               4179 security/selinux/hooks.c 	struct iphdr _iph, *ih;
ih               4182 security/selinux/hooks.c 	ih = skb_header_pointer(skb, offset, sizeof(_iph), &_iph);
ih               4183 security/selinux/hooks.c 	if (ih == NULL)
ih               4186 security/selinux/hooks.c 	ihlen = ih->ihl * 4;
ih               4190 security/selinux/hooks.c 	ad->u.net->v4info.saddr = ih->saddr;
ih               4191 security/selinux/hooks.c 	ad->u.net->v4info.daddr = ih->daddr;
ih               4195 security/selinux/hooks.c 		*proto = ih->protocol;
ih               4197 security/selinux/hooks.c 	switch (ih->protocol) {
ih               4201 security/selinux/hooks.c 		if (ntohs(ih->frag_off) & IP_OFFSET)
ih               4217 security/selinux/hooks.c 		if (ntohs(ih->frag_off) & IP_OFFSET)
ih               4233 security/selinux/hooks.c 		if (ntohs(ih->frag_off) & IP_OFFSET)
ih               4250 security/selinux/hooks.c 		if (ntohs(ih->frag_off) & IP_OFFSET)
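
Most of the netfilter, IPVS and SELinux hits above (ebt_among, ebt_ip, ebt_log, nf_log_ipv4, ip_vs_proto, nft_log, xt_AUDIT, selinux/hooks) repeat one defensive pattern: copy the IP header into a stack buffer with skb_header_pointer(), bail out if the header is unavailable, and skip non-first fragments by testing IP_OFFSET. A condensed sketch of that shared shape, assuming a kernel context with linux/skbuff.h, linux/ip.h and linux/errno.h; the function name is illustrative:

	/* illustrative only: the "copy header, bail on NULL, skip later
	 * fragments" pattern used by the netfilter/LSM hits in this listing */
	static int example_ipv4_proto(const struct sk_buff *skb, int offset, u8 *proto)
	{
		struct iphdr _iph;
		const struct iphdr *ih;

		ih = skb_header_pointer(skb, offset, sizeof(_iph), &_iph);
		if (ih == NULL)
			return -EINVAL;
		*proto = ih->protocol;
		if (ntohs(ih->frag_off) & IP_OFFSET)
			return 1;	/* not the first fragment; L4 ports unavailable */
		return 0;
	}
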