Searched refs:ib (Results 1 – 69 of 69) sorted by relevance

/linux-4.1.27/drivers/gpu/drm/radeon/
radeon_ib.c:56 struct radeon_ib *ib, struct radeon_vm *vm, in radeon_ib_get() argument
61 r = radeon_sa_bo_new(rdev, &rdev->ring_tmp_bo, &ib->sa_bo, size, 256); in radeon_ib_get()
67 radeon_sync_create(&ib->sync); in radeon_ib_get()
69 ib->ring = ring; in radeon_ib_get()
70 ib->fence = NULL; in radeon_ib_get()
71 ib->ptr = radeon_sa_bo_cpu_addr(ib->sa_bo); in radeon_ib_get()
72 ib->vm = vm; in radeon_ib_get()
77 ib->gpu_addr = ib->sa_bo->soffset + RADEON_VA_IB_OFFSET; in radeon_ib_get()
79 ib->gpu_addr = radeon_sa_bo_gpu_addr(ib->sa_bo); in radeon_ib_get()
81 ib->is_const_ib = false; in radeon_ib_get()
[all …]
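
Taken together, the radeon hits trace the life of an indirect buffer (IB): radeon_ib_get() carves the IB out of a suballocated BO and fills in ring, ptr and gpu_addr; the caller writes packets through ib.ptr, schedules the IB, and waits on its fence. A minimal round trip, pieced together from the calls visible in these results (compare the r600_dma.c hit below); rdev, ring and gpu_addr are assumed to be set up elsewhere, and error handling is trimmed:

    struct radeon_ib ib;
    int r;

    /* allocate a 256-byte IB on this ring, with no VM attached */
    r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
    if (r)
            return r;

    /* one DMA write packet: store 0xDEADBEEF at gpu_addr */
    ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1);
    ib.ptr[1] = lower_32_bits(gpu_addr);
    ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff;
    ib.ptr[3] = 0xDEADBEEF;
    ib.length_dw = 4;       /* IB length counts 32-bit words */

    /* submit, then block on the IB's fence before freeing it */
    r = radeon_ib_schedule(rdev, &ib, NULL, false);
    if (!r)
            r = radeon_fence_wait(ib.fence, false);
    radeon_ib_free(rdev, &ib);
    return r;
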
radeon_vce.c:332 struct radeon_ib ib; in radeon_vce_get_create_msg() local
336 r = radeon_ib_get(rdev, ring, &ib, NULL, ib_size_dw * 4); in radeon_vce_get_create_msg()
342 dummy = ib.gpu_addr + 1024; in radeon_vce_get_create_msg()
345 ib.length_dw = 0; in radeon_vce_get_create_msg()
346 ib.ptr[ib.length_dw++] = 0x0000000c; /* len */ in radeon_vce_get_create_msg()
347 ib.ptr[ib.length_dw++] = 0x00000001; /* session cmd */ in radeon_vce_get_create_msg()
348 ib.ptr[ib.length_dw++] = handle; in radeon_vce_get_create_msg()
350 ib.ptr[ib.length_dw++] = 0x00000030; /* len */ in radeon_vce_get_create_msg()
351 ib.ptr[ib.length_dw++] = 0x01000001; /* create cmd */ in radeon_vce_get_create_msg()
352 ib.ptr[ib.length_dw++] = 0x00000000; in radeon_vce_get_create_msg()
[all …]
si_dma.c:70 struct radeon_ib *ib, in si_dma_vm_copy_pages() argument
79 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, in si_dma_vm_copy_pages()
81 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in si_dma_vm_copy_pages()
82 ib->ptr[ib->length_dw++] = lower_32_bits(src); in si_dma_vm_copy_pages()
83 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_copy_pages()
84 ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff; in si_dma_vm_copy_pages()
106 struct radeon_ib *ib, in si_dma_vm_write_pages() argument
120 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw); in si_dma_vm_write_pages()
121 ib->ptr[ib->length_dw++] = pe; in si_dma_vm_write_pages()
122 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_write_pages()
[all …]
ni_dma.c:123 struct radeon_ib *ib) in cayman_dma_ring_ib_execute() argument
125 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cayman_dma_ring_ib_execute()
126 unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; in cayman_dma_ring_ib_execute()
145 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in cayman_dma_ring_ib_execute()
146 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in cayman_dma_ring_ib_execute()
316 struct radeon_ib *ib, in cayman_dma_vm_copy_pages() argument
327 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, in cayman_dma_vm_copy_pages()
329 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cayman_dma_vm_copy_pages()
330 ib->ptr[ib->length_dw++] = lower_32_bits(src); in cayman_dma_vm_copy_pages()
331 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in cayman_dma_vm_copy_pages()
[all …]
cik_sdma.c:134 struct radeon_ib *ib) in cik_sdma_ring_ib_execute() argument
136 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cik_sdma_ring_ib_execute()
137 u32 extra_bits = (ib->vm ? ib->vm->ids[ib->ring].id : 0) & 0xf; in cik_sdma_ring_ib_execute()
155 radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */ in cik_sdma_ring_ib_execute()
156 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr)); in cik_sdma_ring_ib_execute()
157 radeon_ring_write(ring, ib->length_dw); in cik_sdma_ring_ib_execute()
704 struct radeon_ib ib; in cik_sdma_ib_test() local
721 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256); in cik_sdma_ib_test()
727 ib.ptr[0] = SDMA_PACKET(SDMA_OPCODE_WRITE, SDMA_WRITE_SUB_OPCODE_LINEAR, 0); in cik_sdma_ib_test()
728 ib.ptr[1] = lower_32_bits(gpu_addr); in cik_sdma_ib_test()
[all …]
evergreen_cs.c:447 volatile u32 *ib = p->ib.ptr; in evergreen_cs_track_validate_cb() local
469 ib[track->cb_color_slice_idx[id]] = slice; in evergreen_cs_track_validate_cb()
1100 u32 m, i, tmp, *ib; in evergreen_cs_check_reg() local
1121 ib = p->ib.ptr; in evergreen_cs_check_reg()
1169 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in evergreen_cs_check_reg()
1198 ib[idx] &= ~Z_ARRAY_MODE(0xf); in evergreen_cs_check_reg()
1200 ib[idx] |= Z_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags)); in evergreen_cs_check_reg()
1208 ib[idx] |= DB_NUM_BANKS(evergreen_cs_get_num_banks(track->nbanks)); in evergreen_cs_check_reg()
1209 ib[idx] |= DB_TILE_SPLIT(tile_split) | in evergreen_cs_check_reg()
1241 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in evergreen_cs_check_reg()
[all …]
radeon_vm.c:361 struct radeon_ib *ib, in radeon_vm_set_pages() argument
370 radeon_asic_vm_copy_pages(rdev, ib, pe, src, count); in radeon_vm_set_pages()
373 radeon_asic_vm_write_pages(rdev, ib, pe, addr, in radeon_vm_set_pages()
377 radeon_asic_vm_set_pages(rdev, ib, pe, addr, in radeon_vm_set_pages()
391 struct radeon_ib ib; in radeon_vm_clear_bo() local
407 r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, 256); in radeon_vm_clear_bo()
411 ib.length_dw = 0; in radeon_vm_clear_bo()
413 radeon_vm_set_pages(rdev, &ib, addr, 0, entries, 0, 0); in radeon_vm_clear_bo()
414 radeon_asic_vm_pad_ib(rdev, &ib); in radeon_vm_clear_bo()
415 WARN_ON(ib.length_dw > 64); in radeon_vm_clear_bo()
[all …]
r600_cs.c:356 volatile u32 *ib = p->ib.ptr; in r600_cs_track_validate_cb() local
467 ib[track->cb_color_size_idx[i]] = tmp; in r600_cs_track_validate_cb()
526 volatile u32 *ib = p->ib.ptr; in r600_cs_track_validate_db() local
564 ib[track->db_depth_size_idx] = S_028000_SLICE_TILE_MAX(tmp - 1) | (track->db_depth_size & 0x3FF); in r600_cs_track_validate_db()
834 volatile uint32_t *ib; in r600_cs_common_vline_parse() local
836 ib = p->ib.ptr; in r600_cs_common_vline_parse()
899 ib[h_idx + 2] = PACKET2(0); in r600_cs_common_vline_parse()
900 ib[h_idx + 3] = PACKET2(0); in r600_cs_common_vline_parse()
901 ib[h_idx + 4] = PACKET2(0); in r600_cs_common_vline_parse()
902 ib[h_idx + 5] = PACKET2(0); in r600_cs_common_vline_parse()
[all …]
r600_dma.c:339 struct radeon_ib ib; in r600_dma_ib_test() local
353 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256); in r600_dma_ib_test()
359 ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1); in r600_dma_ib_test()
360 ib.ptr[1] = lower_32_bits(gpu_addr); in r600_dma_ib_test()
361 ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff; in r600_dma_ib_test()
362 ib.ptr[3] = 0xDEADBEEF; in r600_dma_ib_test()
363 ib.length_dw = 4; in r600_dma_ib_test()
365 r = radeon_ib_schedule(rdev, &ib, NULL, false); in r600_dma_ib_test()
367 radeon_ib_free(rdev, &ib); in r600_dma_ib_test()
371 r = radeon_fence_wait(ib.fence, false); in r600_dma_ib_test()
[all …]
evergreen_dma.c:68 struct radeon_ib *ib) in evergreen_dma_ring_ib_execute() argument
70 struct radeon_ring *ring = &rdev->ring[ib->ring]; in evergreen_dma_ring_ib_execute()
89 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in evergreen_dma_ring_ib_execute()
90 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in evergreen_dma_ring_ib_execute()
r200.c:151 volatile uint32_t *ib; in r200_packet0_check() local
159 ib = p->ib.ptr; in r200_packet0_check()
191 ib[idx] = idx_value + ((u32)reloc->gpu_offset); in r200_packet0_check()
204 ib[idx] = idx_value + ((u32)reloc->gpu_offset); in r200_packet0_check()
228 ib[idx] = tmp + ((u32)reloc->gpu_offset); in r200_packet0_check()
230 ib[idx] = idx_value + ((u32)reloc->gpu_offset); in r200_packet0_check()
274 ib[idx] = idx_value + ((u32)reloc->gpu_offset); in r200_packet0_check()
300 ib[idx] = tmp; in r200_packet0_check()
302 ib[idx] = idx_value; in r200_packet0_check()
368 ib[idx] = idx_value + ((u32)reloc->gpu_offset); in r200_packet0_check()
radeon_cs.c:177 p->vm_bos = radeon_vm_get_bos(p->rdev, p->ib.vm, in radeon_cs_parser_relocs()
242 r = radeon_sync_resv(p->rdev, &p->ib.sync, resv, in radeon_cs_sync_rings()
267 p->ib.sa_bo = NULL; in radeon_cs_parser_init()
415 &parser->ib.fence->base); in radeon_cs_parser_fini()
437 radeon_ib_free(parser->rdev, &parser->ib); in radeon_cs_parser_fini()
471 r = radeon_ib_schedule(rdev, &parser->ib, NULL, true); in radeon_cs_ib_chunk()
517 radeon_sync_fence(&p->ib.sync, bo_va->last_pt_update); in radeon_bo_vm_update_pte()
542 r = radeon_ring_ib_parse(rdev, parser->ring, &parser->ib); in radeon_cs_ib_vm_chunk()
565 r = radeon_ib_schedule(rdev, &parser->ib, &parser->const_ib, true); in radeon_cs_ib_vm_chunk()
567 r = radeon_ib_schedule(rdev, &parser->ib, NULL, true); in radeon_cs_ib_vm_chunk()
[all …]
radeon_uvd.c:552 p->ib.ptr[data0] = start & 0xFFFFFFFF; in radeon_uvd_cs_reloc()
553 p->ib.ptr[data1] = start >> 32; in radeon_uvd_cs_reloc()
699 struct radeon_ib ib; in radeon_uvd_send_msg() local
702 r = radeon_ib_get(rdev, ring, &ib, NULL, 64); in radeon_uvd_send_msg()
706 ib.ptr[0] = PACKET0(UVD_GPCOM_VCPU_DATA0, 0); in radeon_uvd_send_msg()
707 ib.ptr[1] = addr; in radeon_uvd_send_msg()
708 ib.ptr[2] = PACKET0(UVD_GPCOM_VCPU_DATA1, 0); in radeon_uvd_send_msg()
709 ib.ptr[3] = addr >> 32; in radeon_uvd_send_msg()
710 ib.ptr[4] = PACKET0(UVD_GPCOM_VCPU_CMD, 0); in radeon_uvd_send_msg()
711 ib.ptr[5] = 0; in radeon_uvd_send_msg()
[all …]
radeon_asic.h:95 void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
334 void r600_dma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
344 void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
518 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
545 struct radeon_ib *ib);
610 void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
616 int evergreen_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
617 int evergreen_dma_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
619 struct radeon_ib *ib);
624 struct radeon_ib *ib,
[all …]
r100.c:1295 p->ib.ptr[idx] = (value & 0x3fc00000) | tmp; in r100_reloc_pitch_offset()
1297 p->ib.ptr[idx] = (value & 0xffc00000) | tmp; in r100_reloc_pitch_offset()
1309 volatile uint32_t *ib; in r100_packet3_load_vbpntr() local
1312 ib = p->ib.ptr; in r100_packet3_load_vbpntr()
1331 ib[idx+1] = radeon_get_ib_value(p, idx + 1) + ((u32)reloc->gpu_offset); in r100_packet3_load_vbpntr()
1343 ib[idx+2] = radeon_get_ib_value(p, idx + 2) + ((u32)reloc->gpu_offset); in r100_packet3_load_vbpntr()
1357 ib[idx+1] = radeon_get_ib_value(p, idx + 1) + ((u32)reloc->gpu_offset); in r100_packet3_load_vbpntr()
1432 volatile uint32_t *ib; in r100_cs_packet_parse_vline() local
1434 ib = p->ib.ptr; in r100_cs_packet_parse_vline()
1475 ib[h_idx + 2] = PACKET2(0); in r100_cs_packet_parse_vline()
[all …]
r300.c:609 volatile uint32_t *ib; in r300_packet0_check() local
615 ib = p->ib.ptr; in r300_packet0_check()
651 ib[idx] = idx_value + ((u32)reloc->gpu_offset); in r300_packet0_check()
664 ib[idx] = idx_value + ((u32)reloc->gpu_offset); in r300_packet0_check()
692 ib[idx] = (idx_value & 31) | /* keep the 1st 5 bits */ in r300_packet0_check()
704 ib[idx] = tmp; in r300_packet0_check()
773 ib[idx] = tmp; in r300_packet0_check()
858 ib[idx] = tmp; in r300_packet0_check()
1063 ib[idx] = idx_value + ((u32)reloc->gpu_offset); in r300_packet0_check()
1076 ib[idx] = idx_value & ~1; in r300_packet0_check()
[all …]
uvd_v1_0.c:481 void uvd_v1_0_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) in uvd_v1_0_ib_execute() argument
483 struct radeon_ring *ring = &rdev->ring[ib->ring]; in uvd_v1_0_ib_execute()
486 radeon_ring_write(ring, ib->gpu_addr); in uvd_v1_0_ib_execute()
488 radeon_ring_write(ring, ib->length_dw); in uvd_v1_0_ib_execute()
radeon.h:1018 struct radeon_ib *ib, struct radeon_vm *vm,
1020 void radeon_ib_free(struct radeon_device *rdev, struct radeon_ib *ib);
1021 int radeon_ib_schedule(struct radeon_device *rdev, struct radeon_ib *ib,
1090 struct radeon_ib ib; member
1107 return p->ib.ptr[idx]; in radeon_get_ib_value()
1744 void radeon_vce_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
1822 int (*ib_parse)(struct radeon_device *rdev, struct radeon_ib *ib);
1826 void (*ib_execute)(struct radeon_device *rdev, struct radeon_ib *ib);
1876 struct radeon_ib *ib,
1880 struct radeon_ib *ib,
[all …]
si.c:3398 void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) in si_ring_ib_execute() argument
3400 struct radeon_ring *ring = &rdev->ring[ib->ring]; in si_ring_ib_execute()
3401 unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; in si_ring_ib_execute()
3404 if (ib->is_const_ib) { in si_ring_ib_execute()
3435 (ib->gpu_addr & 0xFFFFFFFC)); in si_ring_ib_execute()
3436 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF); in si_ring_ib_execute()
3437 radeon_ring_write(ring, ib->length_dw | (vm_id << 24)); in si_ring_ib_execute()
3439 if (!ib->is_const_ib) { in si_ring_ib_execute()
4442 u32 *ib, struct radeon_cs_packet *pkt) in si_vm_packet3_ce_check() argument
4463 static int si_vm_packet3_cp_dma_check(u32 *ib, u32 idx) in si_vm_packet3_cp_dma_check() argument
[all …]
ni.c:1406 void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) in cayman_ring_ib_execute() argument
1408 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cayman_ring_ib_execute()
1409 unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; in cayman_ring_ib_execute()
1430 (ib->gpu_addr & 0xFFFFFFFC)); in cayman_ring_ib_execute()
1431 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF); in cayman_ring_ib_execute()
1432 radeon_ring_write(ring, ib->length_dw | (vm_id << 24)); in cayman_ring_ib_execute()
r600.c:3279 void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) in r600_ring_ib_execute() argument
3281 struct radeon_ring *ring = &rdev->ring[ib->ring]; in r600_ring_ib_execute()
3304 (ib->gpu_addr & 0xFFFFFFFC)); in r600_ring_ib_execute()
3305 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF); in r600_ring_ib_execute()
3306 radeon_ring_write(ring, ib->length_dw); in r600_ring_ib_execute()
3311 struct radeon_ib ib; in r600_ib_test() local
3323 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256); in r600_ib_test()
3328 ib.ptr[0] = PACKET3(PACKET3_SET_CONFIG_REG, 1); in r600_ib_test()
3329 ib.ptr[1] = ((scratch - PACKET3_SET_CONFIG_REG_OFFSET) >> 2); in r600_ib_test()
3330 ib.ptr[2] = 0xDEADBEEF; in r600_ib_test()
[all …]
cik.c:4116 void cik_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) in cik_ring_ib_execute() argument
4118 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cik_ring_ib_execute()
4119 unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; in cik_ring_ib_execute()
4122 if (ib->is_const_ib) { in cik_ring_ib_execute()
4148 control |= ib->length_dw | (vm_id << 24); in cik_ring_ib_execute()
4155 (ib->gpu_addr & 0xFFFFFFFC)); in cik_ring_ib_execute()
4156 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF); in cik_ring_ib_execute()
4172 struct radeon_ib ib; in cik_ib_test() local
4184 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256); in cik_ib_test()
4190 ib.ptr[0] = PACKET3(PACKET3_SET_UCONFIG_REG, 1); in cik_ib_test()
[all …]
r600_cp.c:66 unsigned family, u32 *ib, int *l);
2619 u32 *ib, cs_id = 0; in r600_cs_legacy_ioctl() local
2637 ib = dev->agp_buffer_map->handle + buf->offset; in r600_cs_legacy_ioctl()
2639 r = r600_cs_legacy(dev, data, fpriv, family, ib, &l); in r600_cs_legacy_ioctl()
evergreen.c:2971 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) in evergreen_ring_ib_execute() argument
2973 struct radeon_ring *ring = &rdev->ring[ib->ring]; in evergreen_ring_ib_execute()
3000 (ib->gpu_addr & 0xFFFFFFFC)); in evergreen_ring_ib_execute()
3001 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF); in evergreen_ring_ib_execute()
3002 radeon_ring_write(ring, ib->length_dw); in evergreen_ring_ib_execute()
/linux-4.1.27/arch/s390/include/asm/
idals.h:119 struct idal_buffer *ib; in idal_buffer_alloc() local
124 ib = kmalloc(sizeof(struct idal_buffer) + nr_ptrs*sizeof(void *), in idal_buffer_alloc()
126 if (ib == NULL) in idal_buffer_alloc()
128 ib->size = size; in idal_buffer_alloc()
129 ib->page_order = page_order; in idal_buffer_alloc()
132 ib->data[i] = ib->data[i-1] + IDA_BLOCK_SIZE; in idal_buffer_alloc()
135 ib->data[i] = (void *) in idal_buffer_alloc()
137 if (ib->data[i] != NULL) in idal_buffer_alloc()
142 free_pages((unsigned long) ib->data[i], in idal_buffer_alloc()
143 ib->page_order); in idal_buffer_alloc()
[all …]
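
Here ib is an IDAL (indirect data address list) buffer for s390 channel I/O rather than anything GPU- or InfiniBand-related. The usage pattern, following the fs3270.c hits further down, is alloc, IS_ERR check, attach to a request, free; this sketch assumes rq, data and count from the surrounding read path and elides starting the channel program:

    struct idal_buffer *ib;
    ssize_t rc = 0;

    ib = idal_buffer_alloc(count, 0);       /* count bytes, page order 0 */
    if (IS_ERR(ib))
            return PTR_ERR(ib);
    raw3270_request_set_idal(rq, ib);       /* CCW cda/count now cover the IDAL */
    /* ... start the channel program and wait for completion ... */
    if (idal_buffer_to_user(ib, data, count) != 0)
            rc = -EFAULT;
    idal_buffer_free(ib);
    return rc ? rc : count;
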
/linux-4.1.27/drivers/net/ethernet/amd/
7990.c:99 t, ib->brx_ring[t].rmd1_hadr, ib->brx_ring[t].rmd0, \
100 ib->brx_ring[t].length, \
101 ib->brx_ring[t].mblength, ib->brx_ring[t].rmd1_bits); \
105 t, ib->btx_ring[t].tmd1_hadr, ib->btx_ring[t].tmd0, \
106 ib->btx_ring[t].length, \
107 ib->btx_ring[t].misc, ib->btx_ring[t].tmd1_bits); \
139 volatile struct lance_init_block *ib = lp->init_block; in lance_init_ring() local
149 ib->mode = LE_MO_PROM; /* normal, enable Tx & Rx */ in lance_init_ring()
162 ib->phys_addr[0] = dev->dev_addr[1]; in lance_init_ring()
163 ib->phys_addr[1] = dev->dev_addr[0]; in lance_init_ring()
[all …]
sunlance.c:323 struct lance_init_block *ib = lp->init_block_mem; in lance_init_ring_dvma() local
336 ib->phys_addr [0] = dev->dev_addr [1]; in lance_init_ring_dvma()
337 ib->phys_addr [1] = dev->dev_addr [0]; in lance_init_ring_dvma()
338 ib->phys_addr [2] = dev->dev_addr [3]; in lance_init_ring_dvma()
339 ib->phys_addr [3] = dev->dev_addr [2]; in lance_init_ring_dvma()
340 ib->phys_addr [4] = dev->dev_addr [5]; in lance_init_ring_dvma()
341 ib->phys_addr [5] = dev->dev_addr [4]; in lance_init_ring_dvma()
346 ib->btx_ring [i].tmd0 = leptr; in lance_init_ring_dvma()
347 ib->btx_ring [i].tmd1_hadr = leptr >> 16; in lance_init_ring_dvma()
348 ib->btx_ring [i].tmd1_bits = 0; in lance_init_ring_dvma()
[all …]
a2065.c:152 volatile struct lance_init_block *ib = lp->init_block; in lance_init_ring() local
163 ib->mode = 0; in lance_init_ring()
168 ib->phys_addr[0] = dev->dev_addr[1]; in lance_init_ring()
169 ib->phys_addr[1] = dev->dev_addr[0]; in lance_init_ring()
170 ib->phys_addr[2] = dev->dev_addr[3]; in lance_init_ring()
171 ib->phys_addr[3] = dev->dev_addr[2]; in lance_init_ring()
172 ib->phys_addr[4] = dev->dev_addr[5]; in lance_init_ring()
173 ib->phys_addr[5] = dev->dev_addr[4]; in lance_init_ring()
179 ib->btx_ring[i].tmd0 = leptr; in lance_init_ring()
180 ib->btx_ring[i].tmd1_hadr = leptr >> 16; in lance_init_ring()
[all …]
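
The blocks of phys_addr assignments in sunlance.c and a2065.c are one idiom: the LANCE init block stores the station address as little-endian 16-bit words, so consecutive bytes of dev_addr land pairwise swapped. A compact equivalent of the unrolled copies shown above (illustrative only; the drivers themselves keep it unrolled):

    int i;

    /* byte order within each 16-bit word is swapped: 1,0,3,2,5,4 */
    for (i = 0; i < ETH_ALEN; i++)
            ib->phys_addr[i] = dev->dev_addr[i ^ 1];
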
declance.c:234 #define lib_ptr(ib, rt, type) \ argument
235 ((volatile u16 *)((u8 *)(ib) + lib_off(rt, type)))
451 volatile u16 *ib = (volatile u16 *)dev->mem_start; in lance_init_ring() local
463 *lib_ptr(ib, phys_addr[0], lp->type) = (dev->dev_addr[1] << 8) | in lance_init_ring()
465 *lib_ptr(ib, phys_addr[1], lp->type) = (dev->dev_addr[3] << 8) | in lance_init_ring()
467 *lib_ptr(ib, phys_addr[2], lp->type) = (dev->dev_addr[5] << 8) | in lance_init_ring()
473 *lib_ptr(ib, rx_len, lp->type) = (LANCE_LOG_RX_BUFFERS << 13) | in lance_init_ring()
475 *lib_ptr(ib, rx_ptr, lp->type) = leptr; in lance_init_ring()
482 *lib_ptr(ib, tx_len, lp->type) = (LANCE_LOG_TX_BUFFERS << 13) | in lance_init_ring()
484 *lib_ptr(ib, tx_ptr, lp->type) = leptr; in lance_init_ring()
[all …]
ni65.c:226 struct init_block ib; member
580 p->ib.eaddr[i] = daddr[i]; in ni65_init_lance()
583 p->ib.filter[i] = filter; in ni65_init_lance()
584 p->ib.mode = mode; in ni65_init_lance()
586 p->ib.trp = (u32) isa_virt_to_bus(p->tmdhead) | TMDNUMMASK; in ni65_init_lance()
587 p->ib.rrp = (u32) isa_virt_to_bus(p->rmdhead) | RMDNUMMASK; in ni65_init_lance()
589 pib = (u32) isa_virt_to_bus(&p->ib); in ni65_init_lance()
pcnet32.c:2622 volatile struct pcnet32_init_block *ib = lp->init_block; in pcnet32_load_multicast() local
2623 volatile __le16 *mcast_table = (__le16 *)ib->filter; in pcnet32_load_multicast()
2631 ib->filter[0] = cpu_to_le32(~0U); in pcnet32_load_multicast()
2632 ib->filter[1] = cpu_to_le32(~0U); in pcnet32_load_multicast()
2640 ib->filter[0] = 0; in pcnet32_load_multicast()
2641 ib->filter[1] = 0; in pcnet32_load_multicast()
/linux-4.1.27/drivers/infiniband/hw/mlx4/
ah.c:48 ah->av.ib.port_pd = cpu_to_be32(to_mpd(pd)->pdn | (ah_attr->port_num << 24)); in create_ib_ah()
49 ah->av.ib.g_slid = ah_attr->src_path_bits; in create_ib_ah()
51 ah->av.ib.g_slid |= 0x80; in create_ib_ah()
52 ah->av.ib.gid_index = ah_attr->grh.sgid_index; in create_ib_ah()
53 ah->av.ib.hop_limit = ah_attr->grh.hop_limit; in create_ib_ah()
54 ah->av.ib.sl_tclass_flowlabel |= in create_ib_ah()
57 memcpy(ah->av.ib.dgid, ah_attr->grh.dgid.raw, 16); in create_ib_ah()
60 ah->av.ib.dlid = cpu_to_be16(ah_attr->dlid); in create_ib_ah()
62 ah->av.ib.stat_rate = ah_attr->static_rate + MLX4_STAT_RATE_OFFSET; in create_ib_ah()
63 while (ah->av.ib.stat_rate > IB_RATE_2_5_GBPS + MLX4_STAT_RATE_OFFSET && in create_ib_ah()
[all …]
qp.c:2066 be32_to_cpu(ah->av.ib.sl_tclass_flowlabel) >> 28; in build_sriov_qp0_header()
2068 cpu_to_be16(ah->av.ib.g_slid & 0x7f); in build_sriov_qp0_header()
2070 cpu_to_be16(ah->av.ib.g_slid & 0x7f); in build_sriov_qp0_header()
2188 be32_to_cpu(ah->av.ib.port_pd) >> 24, in build_mlx_header()
2189 ah->av.ib.gid_index, &sgid.raw[0]); in build_mlx_header()
2194 be32_to_cpu(ah->av.ib.port_pd) >> 24, in build_mlx_header()
2195 ah->av.ib.gid_index, &sgid); in build_mlx_header()
2209 be32_to_cpu(ah->av.ib.sl_tclass_flowlabel) >> 28; in build_mlx_header()
2210 sqp->ud_header.lrh.destination_lid = ah->av.ib.dlid; in build_mlx_header()
2211 sqp->ud_header.lrh.source_lid = cpu_to_be16(ah->av.ib.g_slid & 0x7f); in build_mlx_header()
[all …]
mlx4_ib.h:733 u8 port = be32_to_cpu(ah->av.ib.port_pd) >> 24 & 3; in mlx4_ib_ah_grh_present()
738 return !!(ah->av.ib.g_slid & 0x80); in mlx4_ib_ah_grh_present()
mad.c:1210 to_mah(ah)->av.ib.gid_index = sgid_index; in mlx4_ib_send_to_wire()
1212 to_mah(ah)->av.ib.port_pd &= cpu_to_be32(0x7FFFFFFF); in mlx4_ib_send_to_wire()
main.c:878 mlx4_spec->ib.l3_qpn = in parse_flow_attr()
880 mlx4_spec->ib.qpn_mask = in parse_flow_attr()
/linux-4.1.27/drivers/isdn/divert/
divert_procfs.c:43 struct divert_info *ib; in put_info_buffer() local
52 if (!(ib = kmalloc(sizeof(struct divert_info) + strlen(cp), GFP_ATOMIC))) in put_info_buffer()
54 strcpy(ib->info_start, cp); /* set output string */ in put_info_buffer()
55 ib->next = NULL; in put_info_buffer()
57 ib->usage_cnt = if_used; in put_info_buffer()
59 divert_info_head = ib; /* new head */ in put_info_buffer()
61 divert_info_tail->next = ib; /* follows existing messages */ in put_info_buffer()
62 divert_info_tail = ib; /* new tail */ in put_info_buffer()
68 ib = divert_info_head; in put_info_buffer()
70 kfree(ib); in put_info_buffer()
/linux-4.1.27/drivers/isdn/hysdn/
hysdn_proclog.c:103 struct log_data *ib; in put_log_buffer() local
117 if (!(ib = kmalloc(sizeof(struct log_data) + strlen(cp), GFP_ATOMIC))) in put_log_buffer()
119 strcpy(ib->log_start, cp); /* set output string */ in put_log_buffer()
120 ib->next = NULL; in put_log_buffer()
121 ib->proc_ctrl = pd; /* point to own control structure */ in put_log_buffer()
123 ib->usage_cnt = pd->if_used; in put_log_buffer()
125 pd->log_head = ib; /* new head */ in put_log_buffer()
127 pd->log_tail->next = ib; /* follows existing messages */ in put_log_buffer()
128 pd->log_tail = ib; /* new tail */ in put_log_buffer()
137 ib = pd->log_head; in put_log_buffer()
[all …]
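
In both ISDN drivers, ib is a freshly kmalloc'd message buffer appended to a head/tail singly linked list (divert_info_head/divert_info_tail, pd->log_head/pd->log_tail). The common core of put_info_buffer() and put_log_buffer(), with locking and reader wakeups omitted and head/tail standing in for the per-driver pointers:

    ib->next = NULL;
    if (!head)
            head = ib;              /* queue was empty: new head */
    else
            tail->next = ib;        /* chain after existing messages */
    tail = ib;                      /* new tail either way */
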
/linux-4.1.27/drivers/s390/char/
fs3270.c:247 struct idal_buffer *ib; in fs3270_read() local
255 ib = idal_buffer_alloc(count, 0); in fs3270_read()
256 if (IS_ERR(ib)) in fs3270_read()
263 raw3270_request_set_idal(rq, ib); in fs3270_read()
270 if (idal_buffer_to_user(ib, data, count) != 0) in fs3270_read()
280 idal_buffer_free(ib); in fs3270_read()
292 struct idal_buffer *ib; in fs3270_write() local
299 ib = idal_buffer_alloc(count, 0); in fs3270_write()
300 if (IS_ERR(ib)) in fs3270_write()
304 if (idal_buffer_from_user(ib, data, count) == 0) { in fs3270_write()
[all …]
raw3270.c:224 raw3270_request_set_idal(struct raw3270_request *rq, struct idal_buffer *ib) in raw3270_request_set_idal() argument
226 rq->ccw.cda = __pa(ib->data); in raw3270_request_set_idal()
227 rq->ccw.count = ib->size; in raw3270_request_set_idal()
/linux-4.1.27/drivers/net/ethernet/brocade/bna/
bna_hw_defs.h:244 struct bna_ib *ib = _ib; \
245 if ((ib->intr_type == BNA_INTR_T_INTX)) { \
247 intx_mask &= ~(ib->intr_vector); \
250 bna_ib_coalescing_timer_set(&ib->door_bell, \
251 ib->coalescing_timeo); \
253 bna_ib_ack(&ib->door_bell, 0); \
259 struct bna_ib *ib = _ib; \
261 ib->door_bell.doorbell_addr); \
262 if (ib->intr_type == BNA_INTR_T_INTX) { \
264 intx_mask |= ib->intr_vector; \
bna_tx_rx.c:24 bna_ib_coalescing_timeo_set(struct bna_ib *ib, u8 coalescing_timeo) in bna_ib_coalescing_timeo_set() argument
26 ib->coalescing_timeo = coalescing_timeo; in bna_ib_coalescing_timeo_set()
27 ib->door_bell.doorbell_ack = BNA_DOORBELL_IB_INT_ACK( in bna_ib_coalescing_timeo_set()
28 (u32)ib->coalescing_timeo, 0); in bna_ib_coalescing_timeo_set()
1663 bna_ib_start(rx->bna, &rxp->cq.ib, is_regular); in bna_rx_sm_started_entry()
1859 cfg_req->q_cfg[i].ib.index_addr.a32.addr_lo = in bna_bfi_rx_enet_start()
1860 rxp->cq.ib.ib_seg_host_addr.lsb; in bna_bfi_rx_enet_start()
1861 cfg_req->q_cfg[i].ib.index_addr.a32.addr_hi = in bna_bfi_rx_enet_start()
1862 rxp->cq.ib.ib_seg_host_addr.msb; in bna_bfi_rx_enet_start()
1863 cfg_req->q_cfg[i].ib.intr.msix_index = in bna_bfi_rx_enet_start()
[all …]
bfi_enet.h:433 struct bfi_enet_ib ib; member
506 struct bfi_enet_ib ib; member
bna_types.h:464 struct bna_ib ib; member
658 struct bna_ib ib; member
/linux-4.1.27/Documentation/devicetree/bindings/arm/
marvell,kirkwood.txt:65 "raidsonic,ib-nas6210"
66 "raidsonic,ib-nas6210-b"
67 "raidsonic,ib-nas6220"
68 "raidsonic,ib-nas6220-b"
69 "raidsonic,ib-nas62x0"
/linux-4.1.27/arch/arm/boot/dts/
kirkwood-ib62x0.dts:8 …compatible = "raidsonic,ib-nas6210-b", "raidsonic,ib-nas6220-b", "raidsonic,ib-nas6210", "raidsoni…
/linux-4.1.27/drivers/infiniband/core/
cma.c:136 struct ib_cm_id *ib; member
154 struct ib_sa_multicast *ib; member
268 kfree(mc->multicast.ib); in release_mc()
740 if (!id_priv->cm_id.ib || (id_priv->id.qp_type == IB_QPT_UD)) in rdma_init_qp_attr()
743 ret = ib_cm_init_qp_attr(id_priv->cm_id.ib, qp_attr, in rdma_init_qp_attr()
843 struct sockaddr_ib *listen_ib, *ib; in cma_save_ib_info() local
846 ib = (struct sockaddr_ib *) &id->route.addr.src_addr; in cma_save_ib_info()
847 ib->sib_family = listen_ib->sib_family; in cma_save_ib_info()
849 ib->sib_pkey = path->pkey; in cma_save_ib_info()
850 ib->sib_flowinfo = path->flow_label; in cma_save_ib_info()
[all …]
/linux-4.1.27/drivers/infiniband/hw/ehca/
ehca_mrmw.c:176 &e_maxmr->ib.ib_mr.lkey, in ehca_get_dma_mr()
177 &e_maxmr->ib.ib_mr.rkey); in ehca_get_dma_mr()
183 ib_mr = &e_maxmr->ib.ib_mr; in ehca_get_dma_mr()
260 e_pd, &e_mr->ib.ib_mr.lkey, in ehca_reg_phys_mr()
261 &e_mr->ib.ib_mr.rkey); in ehca_reg_phys_mr()
289 e_pd, &pginfo, &e_mr->ib.ib_mr.lkey, in ehca_reg_phys_mr()
290 &e_mr->ib.ib_mr.rkey, EHCA_REG_MR); in ehca_reg_phys_mr()
298 return &e_mr->ib.ib_mr; in ehca_reg_phys_mr()
405 e_pd, &pginfo, &e_mr->ib.ib_mr.lkey, in ehca_reg_user_mr()
406 &e_mr->ib.ib_mr.rkey, EHCA_REG_MR); in ehca_reg_user_mr()
[all …]
ehca_classes.h:270 } ib; member
/linux-4.1.27/drivers/pinctrl/meson/
pinctrl-meson.h:195 #define BANK(n, f, l, per, peb, pr, pb, dr, db, or, ob, ir, ib) \ argument
205 [REG_IN] = { ir, ib }, \
/linux-4.1.27/drivers/media/platform/vivid/
vivid-tpg-colors.c:714 double ir, ig, ib; in mult_matrix() local
718 ib = m[2][0] * (*r) + m[2][1] * (*g) + m[2][2] * (*b); in mult_matrix()
721 *b = ib; in mult_matrix()
/linux-4.1.27/drivers/net/ethernet/chelsio/cxgb/
vsc7326.c:221 static void run_table(adapter_t *adapter, struct init_table *ib, int len) in run_table() argument
226 if (ib[i].addr == INITBLOCK_SLEEP) { in run_table()
227 udelay( ib[i].data ); in run_table()
228 pr_err("sleep %d us\n",ib[i].data); in run_table()
230 vsc_write( adapter, ib[i].addr, ib[i].data ); in run_table()
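
run_table() walks an array of register address/data pairs, treating the sentinel address INITBLOCK_SLEEP as a delay rather than a register write. A hypothetical table showing the shape of the pattern; the register addresses and values below are invented for illustration:

    static struct init_table example_table[] = {
            { .addr = 0x1000,          .data = 0x01 },  /* plain register write */
            { .addr = INITBLOCK_SLEEP, .data = 100 },   /* udelay(100), no write */
            { .addr = 0x1004,          .data = 0xff },
    };

    run_table(adapter, example_table, ARRAY_SIZE(example_table));
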
/linux-4.1.27/drivers/net/ethernet/mellanox/mlx4/
mcg.c:861 rule_hw->ib.l3_qpn = spec->ib.l3_qpn; in parse_trans_rule()
862 rule_hw->ib.qpn_mask = spec->ib.qpn_msk; in parse_trans_rule()
863 memcpy(&rule_hw->ib.dst_gid, &spec->ib.dst_gid, 16); in parse_trans_rule()
864 memcpy(&rule_hw->ib.dst_gid_msk, &spec->ib.dst_gid_msk, 16); in parse_trans_rule()
952 "dst-gid = %pI6\n", cur->ib.dst_gid); in mlx4_err_rule()
955 cur->ib.dst_gid_msk); in mlx4_err_rule()
1388 memcpy(spec.ib.dst_gid, gid, 16); in mlx4_trans_to_dmfs_attach()
1389 memset(&spec.ib.dst_gid_msk, 0xff, 16); in mlx4_trans_to_dmfs_attach()
/linux-4.1.27/drivers/video/fbdev/matrox/
matroxfb_maven.c:832 unsigned int ib; in maven_compute_timming() local
869 ib = ((0x3C0000 * i - 0x8000)/ hdec + 0x05E7) >> 8; in maven_compute_timming()
871 } while (ib < ibmin); in maven_compute_timming()
872 if (ib >= m->htotal + 2) { in maven_compute_timming()
873 ib = ibmin; in maven_compute_timming()
879 m->regs[0x9E] = ib; in maven_compute_timming()
880 m->regs[0x9F] = ib >> 8; in maven_compute_timming()
/linux-4.1.27/drivers/gpu/drm/amd/amdkfd/
kfd_packet_manager.c:117 uint64_t ib, size_t ib_size_in_dwords, bool chain) in pm_create_runlist() argument
121 BUG_ON(!pm || !buffer || !ib); in pm_create_runlist()
133 packet->ordinal2 = lower_32_bits(ib); in pm_create_runlist()
134 packet->bitfields3.ib_base_hi = upper_32_bits(ib); in pm_create_runlist()
/linux-4.1.27/net/rds/
Makefile:8 ib.o ib_cm.o ib_recv.o ib_ring.o ib_send.o ib_stats.o \
/linux-4.1.27/drivers/media/dvb-frontends/
bcm3510.c:207 u8 ob[MAX_XFER_SIZE], ib[MAX_XFER_SIZE]; in bcm3510_do_hab_cmd() local
210 if (ilen + 2 > sizeof(ib)) { in bcm3510_do_hab_cmd()
232 (ret = bcm3510_hab_get_response(st, ib, ilen+2)) < 0) in bcm3510_do_hab_cmd()
236 dbufout(ib,ilen+2,deb_hab); in bcm3510_do_hab_cmd()
239 memcpy(ibuf,&ib[2],ilen); in bcm3510_do_hab_cmd()
/linux-4.1.27/drivers/net/ethernet/dec/tulip/
de2104x.c:1856 struct de_srom_media_block *ib = bufp; in de21041_get_srom_info() local
1860 switch(ib->opts & MediaBlockMask) { in de21041_get_srom_info()
1892 bufp += sizeof (ib->opts); in de21041_get_srom_info()
1894 if (ib->opts & MediaCustomCSRs) { in de21041_get_srom_info()
1895 de->media[idx].csr13 = get_unaligned(&ib->csr13); in de21041_get_srom_info()
1896 de->media[idx].csr14 = get_unaligned(&ib->csr14); in de21041_get_srom_info()
1897 de->media[idx].csr15 = get_unaligned(&ib->csr15); in de21041_get_srom_info()
1898 bufp += sizeof(ib->csr13) + sizeof(ib->csr14) + in de21041_get_srom_info()
1899 sizeof(ib->csr15); in de21041_get_srom_info()
/linux-4.1.27/include/net/
inet_hashtables.h:89 static inline struct net *ib_net(struct inet_bind_bucket *ib) in ib_net() argument
91 return read_pnet(&ib->ib_net); in ib_net()
/linux-4.1.27/drivers/gpu/drm/qxl/
qxl_fb.c:50 struct fb_image ib; member
233 op->op.ib = *fb_image; in qxl_fb_delayed_imageblit()
239 op->op.ib.data = op->img_data; in qxl_fb_delayed_imageblit()
385 qxl_fb_imageblit_internal(qfbdev->helper.fbdev, &entry->op.ib); in qxl_fb_work()
/linux-4.1.27/arch/ia64/sn/kernel/sn2/
sn2_smp.c:330 sn2_ptc_deadlock_recovery(short *nasids, short ib, short ie, int mynasid, in sn2_ptc_deadlock_recovery() argument
343 for (i=ib; i <= ie; i++) { in sn2_ptc_deadlock_recovery()
/linux-4.1.27/arch/ia64/kernel/
mca_drv.c:409 if (!pbci || pbci->ib) in is_mca_global()
699 if (pbci->ib) in recover_from_processor_error()
/linux-4.1.27/drivers/gpu/drm/msm/mdp/mdp5/
mdp5_cmd_encoder.c:42 .ib = (ib_val), \
mdp5_encoder.c:48 .ib = (ib_val), \
/linux-4.1.27/include/linux/mlx4/
device.h:770 struct mlx4_av ib; member
1175 struct mlx4_spec_ib ib; member
1282 struct mlx4_net_trans_rule_hw_ib ib; member
/linux-4.1.27/arch/ia64/include/asm/
pal.h:574 ib : 1, /* Internal bus error */ member
742 #define pmci_bus_internal_error pme_bus.ib
/linux-4.1.27/include/rdma/
ib_verbs.h:345 struct ib_protocol_stats ib; member
1387 struct ib_flow_spec_ib ib; member
/linux-4.1.27/Documentation/DocBook/media/
fieldseq_tb.gif.b64:394 XH/2aR9jRCn0f9ohLoEeeL/3ff/3gT/4hX/4ib/4jf/4eb8PYN9XCOMZquD5oT/6pX/6qb/6rf/6
/linux-4.1.27/
MAINTAINERS:6484 T: git git://openfabrics.org/~eli/connect-ib.git
6494 T: git git://openfabrics.org/~eli/connect-ib.git