mc                726 arch/ia64/include/asm/pal.h #define pmci_cache_mc				pme_cache.mc
mc                733 arch/ia64/include/asm/pal.h #define pmci_tlb_mc				pme_tlb.mc
mc                745 arch/ia64/include/asm/pal.h #define pmci_bus_mc				pme_bus.mc
mc                111 arch/ia64/include/asm/processor.h 	__u64 mc : 1;
mc               1311 arch/mips/kernel/cpu-probe.c 			int mc;
mc               1324 arch/mips/kernel/cpu-probe.c 				mc = 1;
mc               1327 arch/mips/kernel/cpu-probe.c 				mc = 0;
mc               1332 arch/mips/kernel/cpu-probe.c 				c->cputype = mc ? CPU_R4400MC : CPU_R4400SC;
mc               1333 arch/mips/kernel/cpu-probe.c 				__cpu_name[cpu] = mc ? "R4400MC" : "R4400SC";
mc               1335 arch/mips/kernel/cpu-probe.c 				c->cputype = mc ? CPU_R4000MC : CPU_R4000SC;
mc               1336 arch/mips/kernel/cpu-probe.c 				__cpu_name[cpu] = mc ? "R4000MC" : "R4000SC";
mc                 45 arch/mips/kvm/mmu.c static void mmu_free_memory_cache(struct kvm_mmu_memory_cache *mc)
mc                 47 arch/mips/kvm/mmu.c 	while (mc->nobjs)
mc                 48 arch/mips/kvm/mmu.c 		free_page((unsigned long)mc->objects[--mc->nobjs]);
mc                 51 arch/mips/kvm/mmu.c static void *mmu_memory_cache_alloc(struct kvm_mmu_memory_cache *mc)
mc                 55 arch/mips/kvm/mmu.c 	BUG_ON(!mc || !mc->nobjs);
mc                 56 arch/mips/kvm/mmu.c 	p = mc->objects[--mc->nobjs];
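The two mmu.c helpers above drain and pop a small stack of preallocated pages. A minimal standalone sketch of that LIFO cache pattern follows; malloc()/free(), CACHE_MAX and the cache_* names are stand-ins for the kernel's page allocator and struct kvm_mmu_memory_cache, so treat this as illustrative rather than the in-tree implementation.

	#include <assert.h>
	#include <stdlib.h>

	#define CACHE_MAX 8

	struct memory_cache {
		int nobjs;                 /* how many preallocated objects are ready */
		void *objects[CACHE_MAX];  /* LIFO stack of buffers */
	};

	/* Refill to a minimum fill level (the kernel's top-up counterpart
	 * does this with page allocations ahead of time). */
	static int cache_topup(struct memory_cache *mc, int min)
	{
		while (mc->nobjs < min) {
			void *p = malloc(4096);          /* stand-in for a page allocation */
			if (!p)
				return -1;
			mc->objects[mc->nobjs++] = p;
		}
		return 0;
	}

	/* Pop one object; mirrors the BUG_ON(!mc->nobjs) + --nobjs idiom above. */
	static void *cache_alloc(struct memory_cache *mc)
	{
		assert(mc->nobjs);
		return mc->objects[--mc->nobjs];
	}

	/* Give everything back, as mmu_free_memory_cache() does with free_page(). */
	static void cache_free_all(struct memory_cache *mc)
	{
		while (mc->nobjs)
			free(mc->objects[--mc->nobjs]);
	}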
mc               2771 arch/powerpc/kernel/prom_init.c 	phandle mc;
mc               2776 arch/powerpc/kernel/prom_init.c 	mc = call_prom("finddevice", 1, 1, ADDR(name));
mc               2777 arch/powerpc/kernel/prom_init.c 	if (!PHANDLE_VALID(mc))
mc               2780 arch/powerpc/kernel/prom_init.c 	if (prom_getproplen(mc, "reg") != 8)
mc               2788 arch/powerpc/kernel/prom_init.c 	if (prom_getprop(mc, "reg", mc_reg, sizeof(mc_reg)) == PROM_ERROR)
mc               2800 arch/powerpc/kernel/prom_init.c 	prom_setprop(mc, name, "reg", mc_reg, sizeof(mc_reg));
mc                 32 arch/powerpc/platforms/powernv/opal-msglog.c ssize_t memcons_copy(struct memcons *mc, char *to, loff_t pos, size_t count)
mc                 39 arch/powerpc/platforms/powernv/opal-msglog.c 	if (!mc)
mc                 42 arch/powerpc/platforms/powernv/opal-msglog.c 	out_pos = be32_to_cpu(READ_ONCE(mc->out_pos));
mc                 48 arch/powerpc/platforms/powernv/opal-msglog.c 	conbuf = phys_to_virt(be64_to_cpu(mc->obuf_phys));
mc                 56 arch/powerpc/platforms/powernv/opal-msglog.c 		avail = be32_to_cpu(mc->obuf_size) - out_pos;
mc                 74 arch/powerpc/platforms/powernv/opal-msglog.c 	if (out_pos > be32_to_cpu(mc->obuf_size)) {
mc                109 arch/powerpc/platforms/powernv/opal-msglog.c 	struct memcons *mc;
mc                117 arch/powerpc/platforms/powernv/opal-msglog.c 	mc = phys_to_virt(mcaddr);
mc                118 arch/powerpc/platforms/powernv/opal-msglog.c 	if (!mc) {
mc                123 arch/powerpc/platforms/powernv/opal-msglog.c 	if (be64_to_cpu(mc->magic) != MEMCONS_MAGIC) {
mc                128 arch/powerpc/platforms/powernv/opal-msglog.c 	return mc;
mc                134 arch/powerpc/platforms/powernv/opal-msglog.c u32 memcons_get_size(struct memcons *mc)
mc                136 arch/powerpc/platforms/powernv/opal-msglog.c 	return be32_to_cpu(mc->ibuf_size) + be32_to_cpu(mc->obuf_size);
mc                 34 arch/powerpc/platforms/powernv/powernv.h ssize_t memcons_copy(struct memcons *mc, char *to, loff_t pos, size_t count);
mc                 35 arch/powerpc/platforms/powernv/powernv.h u32 memcons_get_size(struct memcons *mc);
mc               1159 arch/s390/include/asm/pgtable.h 			       bool nq, bool mr, bool mc);
mc               1009 arch/s390/kvm/priv.c 	bool mr = false, mc = false, nq;
mc               1036 arch/s390/kvm/priv.c 		mc = vcpu->run->s.regs.gprs[reg1] & PFMF_MC;
mc               1091 arch/s390/kvm/priv.c 							key, NULL, nq, mr, mc);
mc                828 arch/s390/mm/pgtable.c 			       bool nq, bool mr, bool mc)
mc                834 arch/s390/mm/pgtable.c 	if (mr | mc) {
mc                842 arch/s390/mm/pgtable.c 		if (!mc)
mc                353 arch/s390/numa/mode_emu.c 	struct toptree *phys, *node, *drawer, *book, *mc, *core;
mc                364 arch/s390/numa/mode_emu.c 		mc = toptree_get_child(book, top->socket_id);
mc                365 arch/s390/numa/mode_emu.c 		core = toptree_get_child(mc, smp_get_base_cpu(cpu));
mc                366 arch/s390/numa/mode_emu.c 		if (!drawer || !book || !mc || !core)
mc                369 arch/s390/numa/mode_emu.c 		toptree_update_mask(mc);
mc                581 arch/um/drivers/line.c 	mconsole_register_dev(&line_driver->mc);
mc                 30 arch/um/drivers/line.h 	struct mc_device mc;
mc                 54 arch/um/drivers/ssl.c 	.mc  = {
mc                 55 arch/um/drivers/ssl.c 		.list		= LIST_HEAD_INIT(driver.mc.list),
mc                 60 arch/um/drivers/stdio_console.c 	.mc  = {
mc                 61 arch/um/drivers/stdio_console.c 		.list		= LIST_HEAD_INIT(driver.mc.list),
mc                 32 arch/um/os-Linux/signal.c static void sig_handler_common(int sig, struct siginfo *si, mcontext_t *mc)
mc                 40 arch/um/os-Linux/signal.c 		get_regs_from_mc(&r, mc);
mc                 41 arch/um/os-Linux/signal.c 		GET_FAULTINFO_FROM_MC(r.faultinfo, mc);
mc                 69 arch/um/os-Linux/signal.c void sig_handler(int sig, struct siginfo *si, mcontext_t *mc)
mc                 81 arch/um/os-Linux/signal.c 	sig_handler_common(sig, si, mc);
mc                 86 arch/um/os-Linux/signal.c static void timer_real_alarm_handler(mcontext_t *mc)
mc                 90 arch/um/os-Linux/signal.c 	if (mc != NULL)
mc                 91 arch/um/os-Linux/signal.c 		get_regs_from_mc(&regs, mc);
mc                 97 arch/um/os-Linux/signal.c void timer_alarm_handler(int sig, struct siginfo *unused_si, mcontext_t *mc)
mc                111 arch/um/os-Linux/signal.c 	timer_real_alarm_handler(mc);
mc                139 arch/um/os-Linux/signal.c static void (*handlers[_NSIG])(int sig, struct siginfo *si, mcontext_t *mc) = {
mc                154 arch/um/os-Linux/signal.c 	mcontext_t *mc = &uc->uc_mcontext;
mc                180 arch/um/os-Linux/signal.c 			(*handlers[sig])(sig, (struct siginfo *)si, mc);
mc                 56 arch/x86/include/asm/microcode.h 	void			*mc;
mc                 45 arch/x86/include/asm/microcode_intel.h #define get_totalsize(mc) \
mc                 46 arch/x86/include/asm/microcode_intel.h 	(((struct microcode_intel *)mc)->hdr.datasize ? \
mc                 47 arch/x86/include/asm/microcode_intel.h 	 ((struct microcode_intel *)mc)->hdr.totalsize : \
mc                 50 arch/x86/include/asm/microcode_intel.h #define get_datasize(mc) \
mc                 51 arch/x86/include/asm/microcode_intel.h 	(((struct microcode_intel *)mc)->hdr.datasize ? \
mc                 52 arch/x86/include/asm/microcode_intel.h 	 ((struct microcode_intel *)mc)->hdr.datasize : DEFAULT_UCODE_DATASIZE)
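A standalone sketch of how get_totalsize()/get_datasize() resolve: a zero datasize in the header selects fixed legacy sizes. The struct layout is the documented 48-byte Intel microcode header, and the 2000/2048-byte defaults are restated here as local constants following the usual DEFAULT_UCODE_DATASIZE/DEFAULT_UCODE_TOTALSIZE convention; treat those values as assumptions rather than something shown in the listing.

	#include <stdio.h>

	struct microcode_header {
		unsigned int hdrver, rev, date, sig, cksum, ldrver, pf;
		unsigned int datasize;     /* 0 means "use the default sizes" */
		unsigned int totalsize;
		unsigned int reserved[3];
	};                                 /* 12 x 4 = 48 bytes */

	#define MC_HEADER_SIZE          sizeof(struct microcode_header)
	#define DEFAULT_UCODE_DATASIZE  2000
	#define DEFAULT_UCODE_TOTALSIZE (DEFAULT_UCODE_DATASIZE + MC_HEADER_SIZE)

	static unsigned int get_totalsize(const void *mc)
	{
		const struct microcode_header *hdr = mc;

		return hdr->datasize ? hdr->totalsize : DEFAULT_UCODE_TOTALSIZE;
	}

	static unsigned int get_datasize(const void *mc)
	{
		const struct microcode_header *hdr = mc;

		return hdr->datasize ? hdr->datasize : DEFAULT_UCODE_DATASIZE;
	}

	int main(void)
	{
		struct microcode_header legacy = { .datasize = 0 };

		/* A legacy update with datasize == 0: 2000 data bytes, 2048 total. */
		printf("total=%u data=%u\n",
		       get_totalsize(&legacy), get_datasize(&legacy));
		return 0;
	}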
mc                 50 arch/x86/kernel/cpu/microcode/amd.c 	struct microcode_amd *mc;
mc                327 arch/x86/kernel/cpu/microcode/amd.c 		struct microcode_amd *mc;
mc                342 arch/x86/kernel/cpu/microcode/amd.c 		mc = (struct microcode_amd *)(buf + SECTION_HDR_SIZE);
mc                343 arch/x86/kernel/cpu/microcode/amd.c 		if (eq_id == mc->hdr.processor_rev_id) {
mc                345 arch/x86/kernel/cpu/microcode/amd.c 			desc->mc = mc;
mc                361 arch/x86/kernel/cpu/microcode/amd.c 	if (desc->mc) {
mc                393 arch/x86/kernel/cpu/microcode/amd.c static int __apply_microcode_amd(struct microcode_amd *mc)
mc                397 arch/x86/kernel/cpu/microcode/amd.c 	native_wrmsrl(MSR_AMD64_PATCH_LOADER, (u64)(long)&mc->hdr.data_code);
mc                401 arch/x86/kernel/cpu/microcode/amd.c 	if (rev != mc->hdr.patch_id)
mc                423 arch/x86/kernel/cpu/microcode/amd.c 	struct microcode_amd *mc;
mc                439 arch/x86/kernel/cpu/microcode/amd.c 	mc = desc.mc;
mc                440 arch/x86/kernel/cpu/microcode/amd.c 	if (!mc)
mc                444 arch/x86/kernel/cpu/microcode/amd.c 	if (rev >= mc->hdr.patch_id)
mc                447 arch/x86/kernel/cpu/microcode/amd.c 	if (!__apply_microcode_amd(mc)) {
mc                448 arch/x86/kernel/cpu/microcode/amd.c 		*new_rev = mc->hdr.patch_id;
mc                452 arch/x86/kernel/cpu/microcode/amd.c 			memcpy(patch, mc, min_t(u32, desc.psize, PATCH_MAX_SIZE));
mc                512 arch/x86/kernel/cpu/microcode/amd.c 	struct microcode_amd *mc;
mc                517 arch/x86/kernel/cpu/microcode/amd.c 		mc	= (struct microcode_amd *)__pa_nodebug(amd_ucode_patch);
mc                520 arch/x86/kernel/cpu/microcode/amd.c 		mc	= (struct microcode_amd *)amd_ucode_patch;
mc                527 arch/x86/kernel/cpu/microcode/amd.c 	if (*new_rev && rev < mc->hdr.patch_id) {
mc                528 arch/x86/kernel/cpu/microcode/amd.c 		if (!__apply_microcode_amd(mc)) {
mc                529 arch/x86/kernel/cpu/microcode/amd.c 			*new_rev = mc->hdr.patch_id;
mc                557 arch/x86/kernel/cpu/microcode/amd.c 	if (!desc.mc)
mc                569 arch/x86/kernel/cpu/microcode/amd.c 	struct microcode_amd *mc;
mc                572 arch/x86/kernel/cpu/microcode/amd.c 	mc = (struct microcode_amd *)amd_ucode_patch;
mc                576 arch/x86/kernel/cpu/microcode/amd.c 	if (rev < mc->hdr.patch_id) {
mc                577 arch/x86/kernel/cpu/microcode/amd.c 		if (!__apply_microcode_amd(mc)) {
mc                578 arch/x86/kernel/cpu/microcode/amd.c 			ucode_new_rev = mc->hdr.patch_id;
mc                662 arch/x86/kernel/cpu/microcode/amd.c 		uci->mc = p->data;
mc                687 arch/x86/kernel/cpu/microcode/amd.c 	uci->mc = p->data;
mc                932 arch/x86/kernel/cpu/microcode/amd.c 	uci->mc = NULL;
mc                782 arch/x86/kernel/cpu/microcode/core.c 	if (uci->valid && uci->mc)
mc                784 arch/x86/kernel/cpu/microcode/core.c 	else if (!uci->mc)
mc                 65 arch/x86/kernel/cpu/microcode/intel.c static int find_matching_signature(void *mc, unsigned int csig, int cpf)
mc                 67 arch/x86/kernel/cpu/microcode/intel.c 	struct microcode_header_intel *mc_hdr = mc;
mc                 79 arch/x86/kernel/cpu/microcode/intel.c 	ext_hdr = mc + get_datasize(mc_hdr) + MC_HEADER_SIZE;
mc                 93 arch/x86/kernel/cpu/microcode/intel.c static int has_newer_microcode(void *mc, unsigned int csig, int cpf, int new_rev)
mc                 95 arch/x86/kernel/cpu/microcode/intel.c 	struct microcode_header_intel *mc_hdr = mc;
mc                100 arch/x86/kernel/cpu/microcode/intel.c 	return find_matching_signature(mc, csig, cpf);
mc                224 arch/x86/kernel/cpu/microcode/intel.c static int microcode_sanity_check(void *mc, int print_err)
mc                227 arch/x86/kernel/cpu/microcode/intel.c 	struct microcode_header_intel *mc_header = mc;
mc                259 arch/x86/kernel/cpu/microcode/intel.c 		ext_header = mc + MC_HEADER_SIZE + data_size;
mc                293 arch/x86/kernel/cpu/microcode/intel.c 		orig_sum += ((u32 *)mc)[i];
mc                486 arch/x86/kernel/cpu/microcode/intel.c static void save_mc_for_early(u8 *mc, unsigned int size)
mc                493 arch/x86/kernel/cpu/microcode/intel.c 	save_microcode_patch(mc, size);
mc                553 arch/x86/kernel/cpu/microcode/intel.c 	struct microcode_intel *mc;
mc                557 arch/x86/kernel/cpu/microcode/intel.c 	mc = uci->mc;
mc                558 arch/x86/kernel/cpu/microcode/intel.c 	if (!mc)
mc                565 arch/x86/kernel/cpu/microcode/intel.c 	*current_mc_date_p = mc->hdr.date;
mc                571 arch/x86/kernel/cpu/microcode/intel.c 	struct microcode_intel *mc;
mc                573 arch/x86/kernel/cpu/microcode/intel.c 	mc = uci->mc;
mc                574 arch/x86/kernel/cpu/microcode/intel.c 	if (!mc)
mc                577 arch/x86/kernel/cpu/microcode/intel.c 	print_ucode_info(uci, mc->hdr.date);
mc                583 arch/x86/kernel/cpu/microcode/intel.c 	struct microcode_intel *mc;
mc                586 arch/x86/kernel/cpu/microcode/intel.c 	mc = uci->mc;
mc                587 arch/x86/kernel/cpu/microcode/intel.c 	if (!mc)
mc                596 arch/x86/kernel/cpu/microcode/intel.c 	if (rev >= mc->hdr.rev) {
mc                608 arch/x86/kernel/cpu/microcode/intel.c 	native_wrmsrl(MSR_IA32_UCODE_WRITE, (unsigned long)mc->bits);
mc                611 arch/x86/kernel/cpu/microcode/intel.c 	if (rev != mc->hdr.rev)
mc                619 arch/x86/kernel/cpu/microcode/intel.c 		print_ucode_info(uci, mc->hdr.date);
mc                690 arch/x86/kernel/cpu/microcode/intel.c 	uci.mc = patch;
mc                714 arch/x86/kernel/cpu/microcode/intel.c 	uci.mc = *iup;
mc                757 arch/x86/kernel/cpu/microcode/intel.c 	uci.mc = p;
mc                794 arch/x86/kernel/cpu/microcode/intel.c 	struct microcode_intel *mc;
mc                804 arch/x86/kernel/cpu/microcode/intel.c 	mc = find_patch(uci);
mc                805 arch/x86/kernel/cpu/microcode/intel.c 	if (!mc) {
mc                806 arch/x86/kernel/cpu/microcode/intel.c 		mc = uci->mc;
mc                807 arch/x86/kernel/cpu/microcode/intel.c 		if (!mc)
mc                817 arch/x86/kernel/cpu/microcode/intel.c 	if (rev >= mc->hdr.rev) {
mc                829 arch/x86/kernel/cpu/microcode/intel.c 	wrmsrl(MSR_IA32_UCODE_WRITE, (unsigned long)mc->bits);
mc                833 arch/x86/kernel/cpu/microcode/intel.c 	if (rev != mc->hdr.rev) {
mc                835 arch/x86/kernel/cpu/microcode/intel.c 		       cpu, mc->hdr.rev);
mc                842 arch/x86/kernel/cpu/microcode/intel.c 			mc->hdr.date & 0xffff,
mc                843 arch/x86/kernel/cpu/microcode/intel.c 			mc->hdr.date >> 24,
mc                844 arch/x86/kernel/cpu/microcode/intel.c 			(mc->hdr.date >> 16) & 0xff);
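Those three expressions unpack the packed hdr.date field for the success printout. A tiny standalone decode, assuming the usual layout (year in the low 16 bits, month in the top byte, day in between) and a made-up sample value:

	#include <stdio.h>

	int main(void)
	{
		unsigned int date = 0x07152019;   /* hypothetical: July 15, 2019 */

		printf("%04x-%02x-%02x\n",        /* prints 2019-07-15 */
		       date & 0xffff,             /* year  */
		       date >> 24,                /* month */
		       (date >> 16) & 0xff);      /* day   */
		return 0;
	}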
mc                867 arch/x86/kernel/cpu/microcode/intel.c 	u8 *new_mc = NULL, *mc = NULL;
mc                892 arch/x86/kernel/cpu/microcode/intel.c 		if (!mc || mc_size > curr_mc_size) {
mc                893 arch/x86/kernel/cpu/microcode/intel.c 			vfree(mc);
mc                894 arch/x86/kernel/cpu/microcode/intel.c 			mc = vmalloc(mc_size);
mc                895 arch/x86/kernel/cpu/microcode/intel.c 			if (!mc)
mc                900 arch/x86/kernel/cpu/microcode/intel.c 		memcpy(mc, &mc_header, sizeof(mc_header));
mc                901 arch/x86/kernel/cpu/microcode/intel.c 		data = mc + sizeof(mc_header);
mc                903 arch/x86/kernel/cpu/microcode/intel.c 		    microcode_sanity_check(mc, 1) < 0) {
mc                909 arch/x86/kernel/cpu/microcode/intel.c 		if (has_newer_microcode(mc, csig, cpf, new_rev)) {
mc                912 arch/x86/kernel/cpu/microcode/intel.c 			new_mc  = mc;
mc                914 arch/x86/kernel/cpu/microcode/intel.c 			mc = NULL;	/* trigger new vmalloc */
mc                919 arch/x86/kernel/cpu/microcode/intel.c 	vfree(mc);
mc                929 arch/x86/kernel/cpu/microcode/intel.c 	vfree(uci->mc);
mc                930 arch/x86/kernel/cpu/microcode/intel.c 	uci->mc = (struct microcode_intel *)new_mc;
mc               1446 arch/x86/kvm/emulate.c 	struct read_cache *mc = &ctxt->mem_read;
mc               1448 arch/x86/kvm/emulate.c 	if (mc->pos < mc->end)
mc               1451 arch/x86/kvm/emulate.c 	WARN_ON((mc->end + size) >= sizeof(mc->data));
mc               1453 arch/x86/kvm/emulate.c 	rc = ctxt->ops->read_emulated(ctxt, addr, mc->data + mc->end, size,
mc               1458 arch/x86/kvm/emulate.c 	mc->end += size;
mc               1461 arch/x86/kvm/emulate.c 	memcpy(dest, mc->data + mc->pos, size);
mc               1462 arch/x86/kvm/emulate.c 	mc->pos += size;
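A standalone sketch of the read-through cache those emulate.c lines implement: the first access fetches into data[] and advances end, and re-executions of the same instruction are then served from pos without touching the backing source again. backing_read() is an invented stand-in for ctxt->ops->read_emulated(), and the buffer size is arbitrary.

	#include <string.h>

	struct read_cache {
		unsigned char data[64];
		unsigned int pos;    /* next byte handed back to the caller */
		unsigned int end;    /* bytes fetched so far */
	};

	static void backing_read(unsigned long addr, void *dst, unsigned int size)
	{
		(void)addr;
		memset(dst, 0xab, size);   /* pretend memory/MMIO contents */
	}

	static int cached_read(struct read_cache *mc, unsigned long addr,
			       void *dest, unsigned int size)
	{
		if (mc->pos >= mc->end) {                 /* not cached yet: fetch */
			if (mc->end + size > sizeof(mc->data))
				return -1;                /* the kernel WARN_ONs instead */
			backing_read(addr, mc->data + mc->end, size);
			mc->end += size;
		}
		memcpy(dest, mc->data + mc->pos, size);   /* serve from the cache */
		mc->pos += size;
		return 0;
	}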
mc               1081 arch/x86/kvm/mmu.c static void mmu_free_memory_cache(struct kvm_mmu_memory_cache *mc,
mc               1084 arch/x86/kvm/mmu.c 	while (mc->nobjs)
mc               1085 arch/x86/kvm/mmu.c 		kmem_cache_free(cache, mc->objects[--mc->nobjs]);
mc               1104 arch/x86/kvm/mmu.c static void mmu_free_memory_cache_page(struct kvm_mmu_memory_cache *mc)
mc               1106 arch/x86/kvm/mmu.c 	while (mc->nobjs)
mc               1107 arch/x86/kvm/mmu.c 		free_page((unsigned long)mc->objects[--mc->nobjs]);
mc               1136 arch/x86/kvm/mmu.c static void *mmu_memory_cache_alloc(struct kvm_mmu_memory_cache *mc)
mc               1140 arch/x86/kvm/mmu.c 	BUG_ON(!mc->nobjs);
mc               1141 arch/x86/kvm/mmu.c 	p = mc->objects[--mc->nobjs];
mc                  7 arch/x86/um/os-Linux/mcontext.c void get_regs_from_mc(struct uml_pt_regs *regs, mcontext_t *mc)
mc                 10 arch/x86/um/os-Linux/mcontext.c #define COPY2(X,Y) regs->gp[X] = mc->gregs[REG_##Y]
mc                 11 arch/x86/um/os-Linux/mcontext.c #define COPY(X) regs->gp[X] = mc->gregs[REG_##X]
mc                 12 arch/x86/um/os-Linux/mcontext.c #define COPY_SEG(X) regs->gp[X] = mc->gregs[REG_##X] & 0xffff;
mc                 13 arch/x86/um/os-Linux/mcontext.c #define COPY_SEG_CPL3(X) regs->gp[X] = (mc->gregs[REG_##X] & 0xffff) | 3;
mc                 20 arch/x86/um/os-Linux/mcontext.c #define COPY2(X,Y) regs->gp[X/sizeof(unsigned long)] = mc->gregs[REG_##Y]
mc                 21 arch/x86/um/os-Linux/mcontext.c #define COPY(X) regs->gp[X/sizeof(unsigned long)] = mc->gregs[REG_##X]
mc                 13 arch/x86/um/shared/sysdep/mcontext.h #define GET_FAULTINFO_FROM_MC(fi, mc) \
mc                 15 arch/x86/um/shared/sysdep/mcontext.h 		(fi).cr2 = (mc)->cr2; \
mc                 16 arch/x86/um/shared/sysdep/mcontext.h 		(fi).error_code = (mc)->gregs[REG_ERR]; \
mc                 17 arch/x86/um/shared/sysdep/mcontext.h 		(fi).trap_no = (mc)->gregs[REG_TRAPNO]; \
mc                 22 arch/x86/um/shared/sysdep/mcontext.h #define GET_FAULTINFO_FROM_MC(fi, mc) \
mc                 24 arch/x86/um/shared/sysdep/mcontext.h 		(fi).cr2 = (mc)->gregs[REG_CR2]; \
mc                 25 arch/x86/um/shared/sysdep/mcontext.h 		(fi).error_code = (mc)->gregs[REG_ERR]; \
mc                 26 arch/x86/um/shared/sysdep/mcontext.h 		(fi).trap_no = (mc)->gregs[REG_TRAPNO]; \
mc                445 arch/x86/xen/enlighten_pv.c 	MULTI_mmuext_op(mcs.mc, op, 1, NULL, DOMID_SELF);
mc                522 arch/x86/xen/enlighten_pv.c 	struct multicall_space mc;
mc                531 arch/x86/xen/enlighten_pv.c 	mc = __xen_mc_entry(0);
mc                533 arch/x86/xen/enlighten_pv.c 	MULTI_update_descriptor(mc.mc, maddr.maddr, t->tls_array[i]);
mc                835 arch/x86/xen/enlighten_pv.c 	MULTI_stack_switch(mcs.mc, __KERNEL_DS, sp0);
mc                877 arch/x86/xen/enlighten_pv.c 	MULTI_fpu_taskswitch(mcs.mc, (cr0 & X86_CR0_TS) != 0);
mc                190 arch/x86/xen/mmu_pv.c 	if (mcs.mc != NULL) {
mc                191 arch/x86/xen/mmu_pv.c 		mcs.mc->args[1]++;
mc                194 arch/x86/xen/mmu_pv.c 		MULTI_mmu_update(mcs.mc, mcs.args, 1, NULL, DOMID_SELF);
mc                208 arch/x86/xen/mmu_pv.c 	if (mcs.mc != NULL) {
mc                209 arch/x86/xen/mmu_pv.c 		mcs.mc->args[1]++;
mc                212 arch/x86/xen/mmu_pv.c 		MULTI_mmuext_op(mcs.mc, mcs.args, 1, NULL, DOMID_SELF);
mc                769 arch/x86/xen/mmu_pv.c 		MULTI_update_va_mapping(mcs.mc, (unsigned long)pt,
mc                906 arch/x86/xen/mmu_pv.c 		MULTI_update_va_mapping(mcs.mc, (unsigned long)pt,
mc               1321 arch/x86/xen/mmu_pv.c 	MULTI_mmuext_op(mcs.mc, op, 1, NULL, DOMID_SELF);
mc               1341 arch/x86/xen/mmu_pv.c 	MULTI_mmuext_op(mcs.mc, op, 1, NULL, DOMID_SELF);
mc               1379 arch/x86/xen/mmu_pv.c 	MULTI_mmuext_op(mcs.mc, &args->op, 1, NULL, DOMID_SELF);
mc               1617 arch/x86/xen/mmu_pv.c 	MULTI_mmuext_op(mcs.mc, mcs.args, 1, NULL, DOMID_SELF);
mc               1626 arch/x86/xen/mmu_pv.c 	MULTI_update_va_mapping(mcs.mc, (unsigned long)addr,
mc               2489 arch/x86/xen/mmu_pv.c 		MULTI_update_va_mapping(mcs.mc, vaddr, VOID_PTE, 0);
mc               2532 arch/x86/xen/mmu_pv.c 		MULTI_update_va_mapping(mcs.mc, vaddr,
mc               2674 arch/x86/xen/mmu_pv.c 	MULTI_mmuext_op(mcs.mc, op, 1, NULL, DOMID_SELF);
mc                 59 arch/x86/xen/multicalls.c 	struct multicall_entry *mc;
mc                 86 arch/x86/xen/multicalls.c 		mc = &b->entries[0];
mc                 88 arch/x86/xen/multicalls.c 		mc->result = xen_single_call(mc->op, mc->args[0], mc->args[1],
mc                 89 arch/x86/xen/multicalls.c 					     mc->args[2], mc->args[3],
mc                 90 arch/x86/xen/multicalls.c 					     mc->args[4]);
mc                 91 arch/x86/xen/multicalls.c 		ret = mc->result < 0;
mc                157 arch/x86/xen/multicalls.c 	ret.mc = &b->entries[b->mcidx];
mc                188 arch/x86/xen/multicalls.c 	ret.mc = &b->entries[b->mcidx - 1];
mc                 12 arch/x86/xen/multicalls.h 	struct multicall_entry *mc;
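A standalone sketch of the batching idea behind struct multicall_space and the mcs.mc callers above: code reserves the next entry in a buffer, fills in op/args, and a later flush either issues a lone pending entry directly (the one-entry fast path in multicalls.c) or submits the whole batch. issue_one() and issue_batch() are invented stand-ins for the hypercall paths, and MC_BATCH is just a plausible buffer size.

	#define MC_BATCH 32

	struct multicall_entry {
		unsigned long op;
		unsigned long args[6];
		long result;
	};

	struct mc_buffer {
		struct multicall_entry entries[MC_BATCH];
		unsigned int mcidx;              /* entries queued so far */
	};

	static long issue_one(const struct multicall_entry *mc)
	{
		(void)mc;                        /* stand-in for the single-call path */
		return 0;
	}

	static void issue_batch(struct multicall_entry *mc, unsigned int n)
	{
		while (n--)                      /* stand-in for submitting the batch */
			mc[n].result = 0;
	}

	static struct multicall_entry *mc_next_entry(struct mc_buffer *b)
	{
		if (b->mcidx == MC_BATCH)
			return 0;                /* the real code flushes instead */
		return &b->entries[b->mcidx++];
	}

	static void mc_flush(struct mc_buffer *b)
	{
		if (b->mcidx == 1)               /* one entry: call it directly */
			b->entries[0].result = issue_one(&b->entries[0]);
		else if (b->mcidx > 1)           /* otherwise submit the batch */
			issue_batch(b->entries, b->mcidx);
		b->mcidx = 0;
	}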
mc                 68 drivers/acpi/apei/hest.c 		struct acpi_hest_ia_machine_check *mc;
mc                 69 drivers/acpi/apei/hest.c 		mc = (struct acpi_hest_ia_machine_check *)hest_hdr;
mc                 70 drivers/acpi/apei/hest.c 		len = sizeof(*mc) + mc->num_hardware_banks *
mc                 73 drivers/acpi/apei/hest.c 		struct acpi_hest_ia_deferred_check *mc;
mc                 74 drivers/acpi/apei/hest.c 		mc = (struct acpi_hest_ia_deferred_check *)hest_hdr;
mc                 75 drivers/acpi/apei/hest.c 		len = sizeof(*mc) + mc->num_hardware_banks *
mc                163 drivers/base/component.c 	struct component_match_array *mc)
mc                171 drivers/base/component.c 		if (mc->compare && mc->compare(c->dev, mc->data))
mc                174 drivers/base/component.c 		if (mc->compare_typed &&
mc                175 drivers/base/component.c 		    mc->compare_typed(c->dev, c->subcomponent, mc->data))
mc                193 drivers/base/component.c 		struct component_match_array *mc = &match->compare[i];
mc                201 drivers/base/component.c 		c = find_component(master, mc);
mc                300 drivers/base/component.c 		struct component_match_array *mc = &match->compare[i];
mc                302 drivers/base/component.c 		if (mc->release)
mc                303 drivers/base/component.c 			mc->release(master, mc->data);
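A standalone sketch of the matcher walk in find_component() above: each registered component is tested against a match entry's plain compare callback, or the typed variant, which also receives a subcomponent id. The structs and the array of components are simplified stand-ins, not the driver-core types.

	#include <stddef.h>

	struct component {
		void *dev;
		int subcomponent;
	};

	struct component_match_array {
		void *data;
		int (*compare)(void *dev, void *data);
		int (*compare_typed)(void *dev, int sub, void *data);
	};

	static struct component *
	find_component(struct component *comps, size_t n,
		       const struct component_match_array *mc)
	{
		size_t i;

		for (i = 0; i < n; i++) {
			struct component *c = &comps[i];

			if (mc->compare && mc->compare(c->dev, mc->data))
				return c;
			if (mc->compare_typed &&
			    mc->compare_typed(c->dev, c->subcomponent, mc->data))
				return c;
		}
		return NULL;
	}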
mc                419 drivers/bus/fsl-mc/fsl-mc-bus.c 	struct fsl_mc *mc;
mc                422 drivers/bus/fsl-mc/fsl-mc-bus.c 	mc = dev_get_drvdata(root_dprc_dev->parent);
mc                424 drivers/bus/fsl-mc/fsl-mc-bus.c 	if (mc->num_translation_ranges == 0) {
mc                432 drivers/bus/fsl-mc/fsl-mc-bus.c 	for (i = 0; i < mc->num_translation_ranges; i++) {
mc                434 drivers/bus/fsl-mc/fsl-mc-bus.c 			&mc->translation_ranges[i];
mc                816 drivers/bus/fsl-mc/fsl-mc-bus.c 	struct fsl_mc *mc;
mc                825 drivers/bus/fsl-mc/fsl-mc-bus.c 	mc = devm_kzalloc(&pdev->dev, sizeof(*mc), GFP_KERNEL);
mc                826 drivers/bus/fsl-mc/fsl-mc-bus.c 	if (!mc)
mc                829 drivers/bus/fsl-mc/fsl-mc-bus.c 	platform_set_drvdata(pdev, mc);
mc                861 drivers/bus/fsl-mc/fsl-mc-bus.c 					       &mc->translation_ranges,
mc                862 drivers/bus/fsl-mc/fsl-mc-bus.c 					       &mc->num_translation_ranges);
mc                890 drivers/bus/fsl-mc/fsl-mc-bus.c 	mc->root_mc_bus_dev = mc_bus_dev;
mc                904 drivers/bus/fsl-mc/fsl-mc-bus.c 	struct fsl_mc *mc = platform_get_drvdata(pdev);
mc                906 drivers/bus/fsl-mc/fsl-mc-bus.c 	if (!fsl_mc_is_root_dprc(&mc->root_mc_bus_dev->dev))
mc                909 drivers/bus/fsl-mc/fsl-mc-bus.c 	fsl_mc_device_remove(mc->root_mc_bus_dev);
mc                911 drivers/bus/fsl-mc/fsl-mc-bus.c 	fsl_destroy_mc_io(mc->root_mc_bus_dev->mc_io);
mc                912 drivers/bus/fsl-mc/fsl-mc-bus.c 	mc->root_mc_bus_dev->mc_io = NULL;
mc                159 drivers/char/hpet.c 		unsigned long m, t, mc, base, k;
mc                165 drivers/char/hpet.c 		mc = read_counter(&hpet->hpet_mc);
mc                181 drivers/char/hpet.c 		base = mc % t;
mc                182 drivers/char/hpet.c 		k = (mc - base + hpetp->hp_delta) / t;
mc                287 drivers/clk/clk-asm9260.c 		const struct asm9260_mux_clock *mc = &asm9260_mux_clks[n];
mc                289 drivers/clk/clk-asm9260.c 		mc->parent_names[0] = ref_clk;
mc                290 drivers/clk/clk-asm9260.c 		mc->parent_names[1] = pll_clk;
mc                291 drivers/clk/clk-asm9260.c 		hw = clk_hw_register_mux_table(NULL, mc->name, mc->parent_names,
mc                292 drivers/clk/clk-asm9260.c 				mc->num_parents, mc->flags, base + mc->offset,
mc                293 drivers/clk/clk-asm9260.c 				0, mc->mask, 0, mc->table, &asm9260_clk_lock);
mc                149 drivers/clk/mediatek/clk-mtk.c struct clk *mtk_clk_register_composite(const struct mtk_composite *mc,
mc                163 drivers/clk/mediatek/clk-mtk.c 	if (mc->mux_shift >= 0) {
mc                168 drivers/clk/mediatek/clk-mtk.c 		mux->reg = base + mc->mux_reg;
mc                169 drivers/clk/mediatek/clk-mtk.c 		mux->mask = BIT(mc->mux_width) - 1;
mc                170 drivers/clk/mediatek/clk-mtk.c 		mux->shift = mc->mux_shift;
mc                172 drivers/clk/mediatek/clk-mtk.c 		mux->flags = mc->mux_flags;
mc                176 drivers/clk/mediatek/clk-mtk.c 		parent_names = mc->parent_names;
mc                177 drivers/clk/mediatek/clk-mtk.c 		num_parents = mc->num_parents;
mc                179 drivers/clk/mediatek/clk-mtk.c 		parent = mc->parent;
mc                184 drivers/clk/mediatek/clk-mtk.c 	if (mc->gate_shift >= 0) {
mc                191 drivers/clk/mediatek/clk-mtk.c 		gate->reg = base + mc->gate_reg;
mc                192 drivers/clk/mediatek/clk-mtk.c 		gate->bit_idx = mc->gate_shift;
mc                200 drivers/clk/mediatek/clk-mtk.c 	if (mc->divider_shift >= 0) {
mc                207 drivers/clk/mediatek/clk-mtk.c 		div->reg = base + mc->divider_reg;
mc                208 drivers/clk/mediatek/clk-mtk.c 		div->shift = mc->divider_shift;
mc                209 drivers/clk/mediatek/clk-mtk.c 		div->width = mc->divider_width;
mc                216 drivers/clk/mediatek/clk-mtk.c 	clk = clk_register_composite(NULL, mc->name, parent_names, num_parents,
mc                220 drivers/clk/mediatek/clk-mtk.c 		mc->flags);
mc                244 drivers/clk/mediatek/clk-mtk.c 		const struct mtk_composite *mc = &mcs[i];
mc                246 drivers/clk/mediatek/clk-mtk.c 		if (clk_data && !IS_ERR_OR_NULL(clk_data->clks[mc->id]))
mc                249 drivers/clk/mediatek/clk-mtk.c 		clk = mtk_clk_register_composite(mc, base, lock);
mc                253 drivers/clk/mediatek/clk-mtk.c 					mc->name, PTR_ERR(clk));
mc                258 drivers/clk/mediatek/clk-mtk.c 			clk_data->clks[mc->id] = clk;
mc                145 drivers/clk/mediatek/clk-mtk.h struct clk *mtk_clk_register_composite(const struct mtk_composite *mc,
mc                104 drivers/devfreq/tegra20-devfreq.c 	struct tegra_mc *mc;
mc                115 drivers/devfreq/tegra20-devfreq.c 	mc = platform_get_drvdata(pdev);
mc                116 drivers/devfreq/tegra20-devfreq.c 	if (!mc)
mc                119 drivers/devfreq/tegra20-devfreq.c 	return mc;
mc                125 drivers/devfreq/tegra20-devfreq.c 	struct tegra_mc *mc;
mc                130 drivers/devfreq/tegra20-devfreq.c 	mc = tegra_get_memory_controller();
mc                131 drivers/devfreq/tegra20-devfreq.c 	if (IS_ERR(mc)) {
mc                132 drivers/devfreq/tegra20-devfreq.c 		err = PTR_ERR(mc);
mc                150 drivers/devfreq/tegra20-devfreq.c 	tegra->regs = mc->regs;
mc                524 drivers/dma/moxart-dma.c 	struct moxart_dmadev *mc = devid;
mc                525 drivers/dma/moxart-dma.c 	struct moxart_chan *ch = &mc->slave_chans[0];
mc                 88 drivers/dma/uniphier-mdmac.c uniphier_mdmac_next_desc(struct uniphier_mdmac_chan *mc)
mc                 92 drivers/dma/uniphier-mdmac.c 	vd = vchan_next_desc(&mc->vc);
mc                 94 drivers/dma/uniphier-mdmac.c 		mc->md = NULL;
mc                100 drivers/dma/uniphier-mdmac.c 	mc->md = to_uniphier_mdmac_desc(vd);
mc                102 drivers/dma/uniphier-mdmac.c 	return mc->md;
mc                106 drivers/dma/uniphier-mdmac.c static void uniphier_mdmac_handle(struct uniphier_mdmac_chan *mc,
mc                109 drivers/dma/uniphier-mdmac.c 	struct uniphier_mdmac_device *mdev = mc->mdev;
mc                130 drivers/dma/uniphier-mdmac.c 	writel(src_mode, mc->reg_ch_base + UNIPHIER_MDMAC_CH_SRC_MODE);
mc                131 drivers/dma/uniphier-mdmac.c 	writel(dest_mode, mc->reg_ch_base + UNIPHIER_MDMAC_CH_DEST_MODE);
mc                132 drivers/dma/uniphier-mdmac.c 	writel(src_addr, mc->reg_ch_base + UNIPHIER_MDMAC_CH_SRC_ADDR);
mc                133 drivers/dma/uniphier-mdmac.c 	writel(dest_addr, mc->reg_ch_base + UNIPHIER_MDMAC_CH_DEST_ADDR);
mc                134 drivers/dma/uniphier-mdmac.c 	writel(chunk_size, mc->reg_ch_base + UNIPHIER_MDMAC_CH_SIZE);
mc                137 drivers/dma/uniphier-mdmac.c 	writel(irq_flag, mc->reg_ch_base + UNIPHIER_MDMAC_CH_IRQ_REQ);
mc                139 drivers/dma/uniphier-mdmac.c 	writel(irq_flag, mc->reg_ch_base + UNIPHIER_MDMAC_CH_IRQ_EN);
mc                141 drivers/dma/uniphier-mdmac.c 	writel(BIT(mc->chan_id), mdev->reg_base + UNIPHIER_MDMAC_CMD);
mc                145 drivers/dma/uniphier-mdmac.c static void uniphier_mdmac_start(struct uniphier_mdmac_chan *mc)
mc                149 drivers/dma/uniphier-mdmac.c 	md = uniphier_mdmac_next_desc(mc);
mc                151 drivers/dma/uniphier-mdmac.c 		uniphier_mdmac_handle(mc, md);
mc                155 drivers/dma/uniphier-mdmac.c static int uniphier_mdmac_abort(struct uniphier_mdmac_chan *mc)
mc                157 drivers/dma/uniphier-mdmac.c 	struct uniphier_mdmac_device *mdev = mc->mdev;
mc                162 drivers/dma/uniphier-mdmac.c 	writel(irq_flag, mc->reg_ch_base + UNIPHIER_MDMAC_CH_IRQ_REQ);
mc                164 drivers/dma/uniphier-mdmac.c 	writel(UNIPHIER_MDMAC_CMD_ABORT | BIT(mc->chan_id),
mc                171 drivers/dma/uniphier-mdmac.c 	return readl_poll_timeout(mc->reg_ch_base + UNIPHIER_MDMAC_CH_IRQ_REQ,
mc                177 drivers/dma/uniphier-mdmac.c 	struct uniphier_mdmac_chan *mc = dev_id;
mc                182 drivers/dma/uniphier-mdmac.c 	spin_lock(&mc->vc.lock);
mc                184 drivers/dma/uniphier-mdmac.c 	irq_stat = readl(mc->reg_ch_base + UNIPHIER_MDMAC_CH_IRQ_DET);
mc                196 drivers/dma/uniphier-mdmac.c 	writel(irq_stat, mc->reg_ch_base + UNIPHIER_MDMAC_CH_IRQ_REQ);
mc                203 drivers/dma/uniphier-mdmac.c 	md = mc->md;
mc                211 drivers/dma/uniphier-mdmac.c 		md = uniphier_mdmac_next_desc(mc);
mc                216 drivers/dma/uniphier-mdmac.c 	uniphier_mdmac_handle(mc, md);
mc                219 drivers/dma/uniphier-mdmac.c 	spin_unlock(&mc->vc.lock);
mc                255 drivers/dma/uniphier-mdmac.c 	struct uniphier_mdmac_chan *mc = to_uniphier_mdmac_chan(vc);
mc                262 drivers/dma/uniphier-mdmac.c 	if (mc->md) {
mc                263 drivers/dma/uniphier-mdmac.c 		vchan_terminate_vdesc(&mc->md->vd);
mc                264 drivers/dma/uniphier-mdmac.c 		mc->md = NULL;
mc                265 drivers/dma/uniphier-mdmac.c 		ret = uniphier_mdmac_abort(mc);
mc                287 drivers/dma/uniphier-mdmac.c 	struct uniphier_mdmac_chan *mc;
mc                302 drivers/dma/uniphier-mdmac.c 	mc = to_uniphier_mdmac_chan(vc);
mc                304 drivers/dma/uniphier-mdmac.c 	if (mc->md && mc->md->vd.tx.cookie == cookie) {
mc                306 drivers/dma/uniphier-mdmac.c 		txstate->residue = readl(mc->reg_ch_base +
mc                308 drivers/dma/uniphier-mdmac.c 		md = mc->md;
mc                331 drivers/dma/uniphier-mdmac.c 	struct uniphier_mdmac_chan *mc = to_uniphier_mdmac_chan(vc);
mc                336 drivers/dma/uniphier-mdmac.c 	if (vchan_issue_pending(vc) && !mc->md)
mc                337 drivers/dma/uniphier-mdmac.c 		uniphier_mdmac_start(mc);
mc                352 drivers/dma/uniphier-mdmac.c 	struct uniphier_mdmac_chan *mc = &mdev->channels[chan_id];
mc                366 drivers/dma/uniphier-mdmac.c 			       IRQF_SHARED, irq_name, mc);
mc                370 drivers/dma/uniphier-mdmac.c 	mc->mdev = mdev;
mc                371 drivers/dma/uniphier-mdmac.c 	mc->reg_ch_base = mdev->reg_base + UNIPHIER_MDMAC_CH_OFFSET +
mc                373 drivers/dma/uniphier-mdmac.c 	mc->chan_id = chan_id;
mc                374 drivers/dma/uniphier-mdmac.c 	mc->vc.desc_free = uniphier_mdmac_desc_free;
mc                375 drivers/dma/uniphier-mdmac.c 	vchan_init(&mc->vc, &mdev->ddev);
mc                162 drivers/edac/i10nm_base.c 				 mtr, mcddrtcfg, imc->mc, i, j);
mc                173 drivers/edac/i10nm_base.c 				     imc->mc, i);
mc                236 drivers/edac/i10nm_base.c 	u8 mc = 0, src_id = 0, node_id = 0;
mc                283 drivers/edac/i10nm_base.c 			d->imc[i].mc  = mc++;
mc                 95 drivers/edac/i5100_edac.c static inline u32 i5100_mc_scrben(u32 mc)
mc                 97 drivers/edac/i5100_edac.c 	return mc >> 7 & 1;
mc                100 drivers/edac/i5100_edac.c static inline u32 i5100_mc_errdeten(u32 mc)
mc                102 drivers/edac/i5100_edac.c 	return mc >> 5 & 1;
mc                105 drivers/edac/i5100_edac.c static inline u32 i5100_mc_scrbdone(u32 mc)
mc                107 drivers/edac/i5100_edac.c 	return mc >> 4 & 1;
mc                344 drivers/edac/i5100_edac.c 	struct pci_dev *mc;	/* device 16 func 1 */
mc                557 drivers/edac/i5100_edac.c 	pci_read_config_dword(priv->mc, I5100_FERR_NF_MEM, &dw);
mc                560 drivers/edac/i5100_edac.c 		pci_read_config_dword(priv->mc, I5100_NERR_NF_MEM, &dw2);
mc                566 drivers/edac/i5100_edac.c 		pci_write_config_dword(priv->mc, I5100_NERR_NF_MEM, dw2);
mc                568 drivers/edac/i5100_edac.c 	pci_write_config_dword(priv->mc, I5100_FERR_NF_MEM, dw);
mc                585 drivers/edac/i5100_edac.c 	pci_read_config_dword(priv->mc, I5100_MC, &dw);
mc                589 drivers/edac/i5100_edac.c 		pci_read_config_dword(priv->mc, I5100_MC, &dw);
mc                593 drivers/edac/i5100_edac.c 			pci_write_config_dword(priv->mc, I5100_MC, dw);
mc                594 drivers/edac/i5100_edac.c 			pci_read_config_dword(priv->mc, I5100_MC, &dw);
mc                609 drivers/edac/i5100_edac.c 	pci_read_config_dword(priv->mc, I5100_MC, &dw);
mc                620 drivers/edac/i5100_edac.c 	pci_write_config_dword(priv->mc, I5100_MC, dw);
mc                622 drivers/edac/i5100_edac.c 	pci_read_config_dword(priv->mc, I5100_MC, &dw);
mc                634 drivers/edac/i5100_edac.c 	pci_read_config_dword(priv->mc, I5100_MC, &dw);
mc                718 drivers/edac/i5100_edac.c 	pci_read_config_word(priv->mc, I5100_SPDDATA, &w);
mc                722 drivers/edac/i5100_edac.c 	pci_write_config_dword(priv->mc, I5100_SPDCMD,
mc                730 drivers/edac/i5100_edac.c 		pci_read_config_word(priv->mc, I5100_SPDDATA, &w);
mc                914 drivers/edac/i5100_edac.c 		pci_write_config_dword(priv->mc, I5100_MEM0EINJMSK0, mask0);
mc                915 drivers/edac/i5100_edac.c 		pci_write_config_word(priv->mc, I5100_MEM0EINJMSK1, mask1);
mc                917 drivers/edac/i5100_edac.c 		pci_write_config_dword(priv->mc, I5100_MEM1EINJMSK0, mask0);
mc                918 drivers/edac/i5100_edac.c 		pci_write_config_word(priv->mc, I5100_MEM1EINJMSK1, mask1);
mc               1091 drivers/edac/i5100_edac.c 	priv->mc = pdev;
mc                135 drivers/edac/i7300_edac.c   #define IS_MIRRORED(mc)		((mc) & (1 << 16))
mc                136 drivers/edac/i7300_edac.c   #define IS_ECC_ENABLED(mc)		((mc) & (1 << 5))
mc                137 drivers/edac/i7300_edac.c   #define IS_RETRY_ENABLED(mc)		((mc) & (1 << 31))
mc                138 drivers/edac/i7300_edac.c   #define IS_SCRBALGO_ENHANCED(mc)	((mc) & (1 << 8))
mc                229 drivers/edac/octeon_edac-lmc.c 	int mc = pdev->id;
mc                246 drivers/edac/octeon_edac-lmc.c 		mci = edac_mc_alloc(mc, ARRAY_SIZE(layers), layers, sizeof(struct octeon_lmc_pvt));
mc                263 drivers/edac/octeon_edac-lmc.c 		cfg0.u64 = cvmx_read_csr(CVMX_LMCX_MEM_CFG0(mc));
mc                266 drivers/edac/octeon_edac-lmc.c 		cvmx_write_csr(CVMX_LMCX_MEM_CFG0(mc), cfg0.u64);
mc                278 drivers/edac/octeon_edac-lmc.c 		mci = edac_mc_alloc(mc, ARRAY_SIZE(layers), layers, sizeof(struct octeon_lmc_pvt));
mc                295 drivers/edac/octeon_edac-lmc.c 		en.u64 = cvmx_read_csr(CVMX_LMCX_MEM_CFG0(mc));
mc                298 drivers/edac/octeon_edac-lmc.c 		cvmx_write_csr(CVMX_LMCX_MEM_CFG0(mc), en.u64);
mc                355 drivers/edac/sb_edac.c 	u8			bus, mc;
mc                579 drivers/edac/sb_edac.c #define knl_channel_remap(mc, chan) ((mc) ? (chan) : (chan) + 3)
mc               1149 drivers/edac/sb_edac.c 		const int mc,
mc               1158 drivers/edac/sb_edac.c 	switch (mc) {
mc               1253 drivers/edac/sb_edac.c 	int mc, chan;
mc               1257 drivers/edac/sb_edac.c 	mc = GET_BITFIELD(reg, entry*3, (entry*3)+2);
mc               1260 drivers/edac/sb_edac.c 	return knl_channel_remap(mc, chan);
mc               1358 drivers/edac/sb_edac.c 	int mc;
mc               1479 drivers/edac/sb_edac.c 		for (mc = 0; mc < 2; mc++) {
mc               1480 drivers/edac/sb_edac.c 			sad_actual_size[mc] = 0;
mc               1488 drivers/edac/sb_edac.c 						mc,
mc               1510 drivers/edac/sb_edac.c 							mc);
mc               1511 drivers/edac/sb_edac.c 						sad_actual_size[mc] += tad_size;
mc               1517 drivers/edac/sb_edac.c 		for (mc = 0; mc < 2; mc++) {
mc               1519 drivers/edac/sb_edac.c 				mc, sad_actual_size[mc], sad_actual_size[mc]);
mc               1550 drivers/edac/sb_edac.c 			mc = knl_channel_mc(channel);
mc               1554 drivers/edac/sb_edac.c 					sad_actual_size[mc]/intrlv_ways,
mc               1557 drivers/edac/sb_edac.c 					sad_actual_size[mc]/intrlv_ways;
mc               1657 drivers/edac/sb_edac.c 					 pvt->sbridge_dev->mc, pvt->sbridge_dev->dom, i, j,
mc               1685 drivers/edac/sb_edac.c 		 pvt->sbridge_dev->mc,
mc               2006 drivers/edac/sb_edac.c 			 pvt->sbridge_dev->mc,
mc               3242 drivers/edac/sb_edac.c 	mci = edac_mc_alloc(sbridge_dev->mc, ARRAY_SIZE(layers), layers,
mc               3450 drivers/edac/sb_edac.c 	u8 mc, num_mc = 0;
mc               3462 drivers/edac/sb_edac.c 	mc = 0;
mc               3466 drivers/edac/sb_edac.c 			 mc, mc + 1, num_mc);
mc               3468 drivers/edac/sb_edac.c 		sbridge_dev->mc = mc++;
mc                189 drivers/edac/skx_base.c 			skx_printk(KERN_ERR, "ECC is disabled on imc %d\n", imc->mc);
mc                326 drivers/edac/skx_base.c #define SKX_GET_TADBASE(d, mc, i, reg)			\
mc                327 drivers/edac/skx_base.c 	pci_read_config_dword((d)->imc[mc].chan[0].cdev, 0x850 + 4 * (i), &(reg))
mc                328 drivers/edac/skx_base.c #define SKX_GET_TADWAYNESS(d, mc, i, reg)		\
mc                329 drivers/edac/skx_base.c 	pci_read_config_dword((d)->imc[mc].chan[0].cdev, 0x880 + 4 * (i), &(reg))
mc                330 drivers/edac/skx_base.c #define SKX_GET_TADCHNILVOFFSET(d, mc, ch, i, reg)	\
mc                331 drivers/edac/skx_base.c 	pci_read_config_dword((d)->imc[mc].chan[ch].cdev, 0x90 + 4 * (i), &(reg))
mc                401 drivers/edac/skx_base.c #define SKX_GET_RIRWAYNESS(d, mc, ch, i, reg)		\
mc                402 drivers/edac/skx_base.c 	pci_read_config_dword((d)->imc[mc].chan[ch].cdev,	\
mc                404 drivers/edac/skx_base.c #define SKX_GET_RIRILV(d, mc, ch, idx, i, reg)		\
mc                405 drivers/edac/skx_base.c 	pci_read_config_dword((d)->imc[mc].chan[ch].cdev,	\
mc                599 drivers/edac/skx_base.c 	u8 mc = 0, src_id, node_id;
mc                646 drivers/edac/skx_base.c 			d->imc[i].mc = mc++;
mc                303 drivers/edac/skx_common.c 		 imc->mc, chan, dimmno, size, npages,
mc                363 drivers/edac/skx_common.c 		 imc->mc, chan, dimmno, size >> 20, dimm->nr_pages);
mc                387 drivers/edac/skx_common.c 	mci = edac_mc_alloc(imc->mc, ARRAY_SIZE(layers), layers,
mc                393 drivers/edac/skx_common.c 	edac_dbg(0, "MC#%d: mci = %p\n", imc->mc, mci);
mc                445 drivers/edac/skx_common.c 	edac_dbg(0, "MC%d: mci = %p\n", imc->mc, mci);
mc                 62 drivers/edac/skx_common.h 		u8 mc;	/* system wide mc# */
mc               2825 drivers/firewire/ohci.c 		ctx->base.callback.mc(&ctx->base,
mc               2841 drivers/firewire/ohci.c 	ctx->base.callback.mc(&ctx->base,
mc                 43 drivers/gpio/gpio-mc33880.c static int mc33880_write_config(struct mc33880 *mc)
mc                 45 drivers/gpio/gpio-mc33880.c 	return spi_write(mc->spi, &mc->port_config, sizeof(mc->port_config));
mc                 49 drivers/gpio/gpio-mc33880.c static int __mc33880_set(struct mc33880 *mc, unsigned offset, int value)
mc                 52 drivers/gpio/gpio-mc33880.c 		mc->port_config |= 1 << offset;
mc                 54 drivers/gpio/gpio-mc33880.c 		mc->port_config &= ~(1 << offset);
mc                 56 drivers/gpio/gpio-mc33880.c 	return mc33880_write_config(mc);
mc                 62 drivers/gpio/gpio-mc33880.c 	struct mc33880 *mc = gpiochip_get_data(chip);
mc                 64 drivers/gpio/gpio-mc33880.c 	mutex_lock(&mc->lock);
mc                 66 drivers/gpio/gpio-mc33880.c 	__mc33880_set(mc, offset, value);
mc                 68 drivers/gpio/gpio-mc33880.c 	mutex_unlock(&mc->lock);
mc                 73 drivers/gpio/gpio-mc33880.c 	struct mc33880 *mc;
mc                 92 drivers/gpio/gpio-mc33880.c 	mc = devm_kzalloc(&spi->dev, sizeof(struct mc33880), GFP_KERNEL);
mc                 93 drivers/gpio/gpio-mc33880.c 	if (!mc)
mc                 96 drivers/gpio/gpio-mc33880.c 	mutex_init(&mc->lock);
mc                 98 drivers/gpio/gpio-mc33880.c 	spi_set_drvdata(spi, mc);
mc                100 drivers/gpio/gpio-mc33880.c 	mc->spi = spi;
mc                102 drivers/gpio/gpio-mc33880.c 	mc->chip.label = DRIVER_NAME,
mc                103 drivers/gpio/gpio-mc33880.c 	mc->chip.set = mc33880_set;
mc                104 drivers/gpio/gpio-mc33880.c 	mc->chip.base = pdata->base;
mc                105 drivers/gpio/gpio-mc33880.c 	mc->chip.ngpio = PIN_NUMBER;
mc                106 drivers/gpio/gpio-mc33880.c 	mc->chip.can_sleep = true;
mc                107 drivers/gpio/gpio-mc33880.c 	mc->chip.parent = &spi->dev;
mc                108 drivers/gpio/gpio-mc33880.c 	mc->chip.owner = THIS_MODULE;
mc                110 drivers/gpio/gpio-mc33880.c 	mc->port_config = 0x00;
mc                115 drivers/gpio/gpio-mc33880.c 	ret = mc33880_write_config(mc);
mc                116 drivers/gpio/gpio-mc33880.c 	mc->port_config = 0x00;
mc                118 drivers/gpio/gpio-mc33880.c 		ret = mc33880_write_config(mc);
mc                126 drivers/gpio/gpio-mc33880.c 	ret = gpiochip_add_data(&mc->chip, mc);
mc                133 drivers/gpio/gpio-mc33880.c 	mutex_destroy(&mc->lock);
mc                139 drivers/gpio/gpio-mc33880.c 	struct mc33880 *mc;
mc                141 drivers/gpio/gpio-mc33880.c 	mc = spi_get_drvdata(spi);
mc                142 drivers/gpio/gpio-mc33880.c 	if (!mc)
mc                145 drivers/gpio/gpio-mc33880.c 	gpiochip_remove(&mc->chip);
mc                146 drivers/gpio/gpio-mc33880.c 	mutex_destroy(&mc->lock);
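A standalone sketch of the shadow-register idiom the gpio-mc33880 lines above use: the driver caches the last value written to the expander, flips one bit locally, and pushes the whole byte back out. bus_write() is an invented stand-in for the spi_write() call, and the real driver also holds a mutex around the update.

	#include <stdint.h>

	struct shadow_gpio {
		uint8_t port_config;   /* last value written to the chip */
	};

	static int bus_write(uint8_t val)
	{
		(void)val;             /* stand-in for spi_write(mc->spi, ...) */
		return 0;
	}

	static int shadow_gpio_set(struct shadow_gpio *mc, unsigned int offset, int value)
	{
		if (value)
			mc->port_config |= 1u << offset;
		else
			mc->port_config &= ~(1u << offset);

		return bus_write(mc->port_config);   /* whole register goes out */
	}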
mc                144 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c void amdgpu_gmc_vram_location(struct amdgpu_device *adev, struct amdgpu_gmc *mc,
mc                149 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	mc->vram_start = base;
mc                150 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;
mc                151 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	if (limit && limit < mc->real_vram_size)
mc                152 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		mc->real_vram_size = limit;
mc                154 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	if (mc->xgmi.num_physical_nodes == 0) {
mc                155 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		mc->fb_start = mc->vram_start;
mc                156 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		mc->fb_end = mc->vram_end;
mc                159 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 			mc->mc_vram_size >> 20, mc->vram_start,
mc                160 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 			mc->vram_end, mc->real_vram_size >> 20);
mc                174 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c void amdgpu_gmc_gart_location(struct amdgpu_device *adev, struct amdgpu_gmc *mc)
mc                181 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	mc->gart_size += adev->pm.smu_prv_buffer_size;
mc                186 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	size_bf = mc->fb_start;
mc                187 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	size_af = max_mc_address + 1 - ALIGN(mc->fb_end + 1, four_gb);
mc                189 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	if (mc->gart_size > max(size_bf, size_af)) {
mc                191 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		mc->gart_size = max(size_bf, size_af);
mc                194 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	if ((size_bf >= mc->gart_size && size_bf < size_af) ||
mc                195 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	    (size_af < mc->gart_size))
mc                196 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		mc->gart_start = 0;
mc                198 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		mc->gart_start = max_mc_address - mc->gart_size + 1;
mc                200 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	mc->gart_start &= ~(four_gb - 1);
mc                201 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	mc->gart_end = mc->gart_start + mc->gart_size - 1;
mc                203 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 			mc->gart_size >> 20, mc->gart_start, mc->gart_end);
mc                217 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c void amdgpu_gmc_agp_location(struct amdgpu_device *adev, struct amdgpu_gmc *mc)
mc                224 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		mc->agp_start = 0xffffffff;
mc                225 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		mc->agp_end = 0x0;
mc                226 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		mc->agp_size = 0;
mc                231 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	if (mc->fb_start > mc->gart_start) {
mc                232 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		size_bf = (mc->fb_start & sixteen_gb_mask) -
mc                233 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 			ALIGN(mc->gart_end + 1, sixteen_gb);
mc                234 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		size_af = mc->mc_mask + 1 - ALIGN(mc->fb_end + 1, sixteen_gb);
mc                236 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		size_bf = mc->fb_start & sixteen_gb_mask;
mc                237 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		size_af = (mc->gart_start & sixteen_gb_mask) -
mc                238 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 			ALIGN(mc->fb_end + 1, sixteen_gb);
mc                242 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		mc->agp_start = (mc->fb_start - size_bf) & sixteen_gb_mask;
mc                243 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		mc->agp_size = size_bf;
mc                245 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		mc->agp_start = ALIGN(mc->fb_end + 1, sixteen_gb);
mc                246 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		mc->agp_size = size_af;
mc                249 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	mc->agp_end = mc->agp_start + mc->agp_size - 1;
mc                251 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 			mc->agp_size >> 20, mc->agp_start, mc->agp_end);
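A standalone sketch of the placement decision in amdgpu_gmc_gart_location() above: measure the gap below the framebuffer window and the gap above it, shrink the requested aperture if neither gap can hold it, then pick one of the two gaps. The 48-bit address space, the sample framebuffer range and the 1 GiB request are all made up, and the real code additionally handles 4 GiB alignment, AGP and the SMU buffer.

	#include <stdint.h>
	#include <stdio.h>

	int main(void)
	{
		uint64_t mc_mask   = (1ULL << 48) - 1;        /* hypothetical MC address space */
		uint64_t fb_start  = 0x0000008000000000ULL;   /* hypothetical FB window */
		uint64_t fb_end    = 0x00000083ffffffffULL;
		uint64_t gart_size = 1ULL << 30;              /* request a 1 GiB aperture */

		uint64_t size_bf = fb_start;                    /* room below the FB */
		uint64_t size_af = mc_mask + 1 - (fb_end + 1);  /* room above the FB */
		uint64_t bigger  = size_bf > size_af ? size_bf : size_af;
		uint64_t gart_start;

		if (gart_size > bigger)
			gart_size = bigger;                     /* shrink to the larger gap */

		if ((size_bf >= gart_size && size_bf < size_af) || size_af < gart_size)
			gart_start = 0;                         /* below the FB */
		else
			gart_start = mc_mask + 1 - gart_size;   /* at the top of the space */

		printf("GART: 0x%llx-0x%llx\n",
		       (unsigned long long)gart_start,
		       (unsigned long long)(gart_start + gart_size - 1));
		return 0;
	}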
mc                227 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.h void amdgpu_gmc_vram_location(struct amdgpu_device *adev, struct amdgpu_gmc *mc,
mc                230 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.h 			      struct amdgpu_gmc *mc);
mc                232 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.h 			     struct amdgpu_gmc *mc);
mc                251 drivers/gpu/drm/amd/amdgpu/amdgpu_ucode.h 	struct mc_firmware_header_v1_0 mc;
mc                575 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 					struct amdgpu_gmc *mc)
mc                583 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 	amdgpu_gmc_gart_location(adev, mc);
mc                226 drivers/gpu/drm/amd/amdgpu/gmc_v6_0.c 				       struct amdgpu_gmc *mc)
mc                231 drivers/gpu/drm/amd/amdgpu/gmc_v6_0.c 	amdgpu_gmc_vram_location(adev, mc, base);
mc                232 drivers/gpu/drm/amd/amdgpu/gmc_v6_0.c 	amdgpu_gmc_gart_location(adev, mc);
mc                243 drivers/gpu/drm/amd/amdgpu/gmc_v7_0.c 				       struct amdgpu_gmc *mc)
mc                248 drivers/gpu/drm/amd/amdgpu/gmc_v7_0.c 	amdgpu_gmc_vram_location(adev, mc, base);
mc                249 drivers/gpu/drm/amd/amdgpu/gmc_v7_0.c 	amdgpu_gmc_gart_location(adev, mc);
mc                431 drivers/gpu/drm/amd/amdgpu/gmc_v8_0.c 				       struct amdgpu_gmc *mc)
mc                439 drivers/gpu/drm/amd/amdgpu/gmc_v8_0.c 	amdgpu_gmc_vram_location(adev, mc, base);
mc                440 drivers/gpu/drm/amd/amdgpu/gmc_v8_0.c 	amdgpu_gmc_gart_location(adev, mc);
mc               1013 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 					struct amdgpu_gmc *mc)
mc               1024 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 	amdgpu_gmc_vram_location(adev, mc, base);
mc               1025 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 	amdgpu_gmc_gart_location(adev, mc);
mc               1026 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 	amdgpu_gmc_agp_location(adev, mc);
mc                151 drivers/gpu/drm/arm/malidp_crtc.c 	struct malidp_crtc_state *mc = to_malidp_crtc_state(state);
mc                191 drivers/gpu/drm/arm/malidp_crtc.c 	malidp_generate_gamma_table(state->gamma_lut, mc->gamma_coeffs);
mc                205 drivers/gpu/drm/arm/malidp_crtc.c 	struct malidp_crtc_state *mc = to_malidp_crtc_state(state);
mc                241 drivers/gpu/drm/arm/malidp_crtc.c 		mc->coloradj_coeffs[i] = mag;
mc                 74 drivers/gpu/drm/arm/malidp_drv.c 		struct malidp_crtc_state *mc =
mc                 79 drivers/gpu/drm/arm/malidp_drv.c 			malidp_write_gamma_table(hwdev, mc->gamma_coeffs);
mc                101 drivers/gpu/drm/arm/malidp_drv.c 		struct malidp_crtc_state *mc =
mc                108 drivers/gpu/drm/arm/malidp_drv.c 						mc->coloradj_coeffs[i],
mc                275 drivers/gpu/drm/arm/malidp_planes.c 	struct malidp_crtc_state *mc;
mc                282 drivers/gpu/drm/arm/malidp_planes.c 	mc = to_malidp_crtc_state(crtc_state);
mc                299 drivers/gpu/drm/arm/malidp_planes.c 		mc->scaled_planes_mask &= ~(mp->layer->id);
mc                306 drivers/gpu/drm/arm/malidp_planes.c 	mc->scaled_planes_mask |= mp->layer->id;
mc                128 drivers/gpu/drm/cirrus/cirrus_drv.h 	struct cirrus_mc			mc;
mc               1100 drivers/gpu/drm/i810/i810_dma.c 	drm_i810_mc_t *mc = data;
mc               1104 drivers/gpu/drm/i810/i810_dma.c 	if (mc->idx >= dma->buf_count || mc->idx < 0)
mc               1107 drivers/gpu/drm/i810/i810_dma.c 	i810_dma_dispatch_mc(dev, dma->buflist[mc->idx], mc->used,
mc               1108 drivers/gpu/drm/i810/i810_dma.c 			     mc->last_render);
mc                 90 drivers/gpu/drm/mgag200/mgag200_drv.c 		pg_align = PFN_UP(mdev->mc.vram_size);
mc                178 drivers/gpu/drm/mgag200/mgag200_drv.h 	struct mga_mc			mc;
mc                 33 drivers/gpu/drm/mgag200/mgag200_main.c 	vram_size = mdev->mc.vram_window;
mc                 71 drivers/gpu/drm/mgag200/mgag200_main.c 	mdev->mc.vram_base = pci_resource_start(mdev->dev->pdev, 0);
mc                 72 drivers/gpu/drm/mgag200/mgag200_main.c 	mdev->mc.vram_window = pci_resource_len(mdev->dev->pdev, 0);
mc                 74 drivers/gpu/drm/mgag200/mgag200_main.c 	if (!devm_request_mem_region(mdev->dev->dev, mdev->mc.vram_base, mdev->mc.vram_window,
mc                 84 drivers/gpu/drm/mgag200/mgag200_main.c 	mdev->mc.vram_size = mga_probe_vram(mdev, mem);
mc                163 drivers/gpu/drm/mgag200/mgag200_main.c 	if (IS_G200_SE(mdev) && mdev->mc.vram_size < (2048*1024))
mc               1632 drivers/gpu/drm/mgag200/mgag200_mode.c 	if ((mode->hdisplay * mode->vdisplay * (bpp/8)) > mdev->mc.vram_size) {
mc               1708 drivers/gpu/drm/mgag200/mgag200_mode.c 	mdev->dev->mode_config.fb_base = mdev->mc.vram_base;
mc                 40 drivers/gpu/drm/mgag200/mgag200_ttm.c 				       mdev->mc.vram_size,
mc                147 drivers/gpu/drm/nouveau/include/nvkm/core/device.h 	struct nvkm_mc *mc;
mc                220 drivers/gpu/drm/nouveau/include/nvkm/core/device.h 	int (*mc      )(struct nvkm_device *, int idx, struct nvkm_mc **);
mc                 87 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv04_mc_new,
mc                108 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv04_mc_new,
mc                130 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv04_mc_new,
mc                150 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv11_mc_new,
mc                172 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv04_mc_new,
mc                194 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                216 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                238 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv04_mc_new,
mc                260 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                282 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                304 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                326 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                348 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                370 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                392 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                415 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                438 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                460 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                483 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                509 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                535 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                561 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                587 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv44_mc_new,
mc                613 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                639 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv44_mc_new,
mc                665 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                691 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                717 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv44_mc_new,
mc                743 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv17_mc_new,
mc                769 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv44_mc_new,
mc                795 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv44_mc_new,
mc                823 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv50_mc_new,
mc                850 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv44_mc_new,
mc                876 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv44_mc_new,
mc                902 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = nv44_mc_new,
mc                930 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = g84_mc_new,
mc                962 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = g84_mc_new,
mc                994 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = g84_mc_new,
mc               1026 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = g84_mc_new,
mc               1058 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = g84_mc_new,
mc               1090 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = g98_mc_new,
mc               1122 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = g84_mc_new,
mc               1154 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gt215_mc_new,
mc               1188 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gt215_mc_new,
mc               1221 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gt215_mc_new,
mc               1254 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = g98_mc_new,
mc               1286 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = g98_mc_new,
mc               1318 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gt215_mc_new,
mc               1354 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gf100_mc_new,
mc               1391 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gf100_mc_new,
mc               1427 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gf100_mc_new,
mc               1463 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gf100_mc_new,
mc               1500 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gf100_mc_new,
mc               1537 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gf100_mc_new,
mc               1574 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gf100_mc_new,
mc               1610 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gf100_mc_new,
mc               1645 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gf100_mc_new,
mc               1681 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gk104_mc_new,
mc               1720 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gk104_mc_new,
mc               1759 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gk104_mc_new,
mc               1793 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gk20a_mc_new,
mc               1823 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gk104_mc_new,
mc               1861 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gk104_mc_new,
mc               1899 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gk20a_mc_new,
mc               1937 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gk20a_mc_new,
mc               1975 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gk20a_mc_new,
mc               2009 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gk20a_mc_new,
mc               2042 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gk20a_mc_new,
mc               2077 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gk20a_mc_new,
mc               2112 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gk20a_mc_new,
mc               2143 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gk20a_mc_new,
mc               2172 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gp100_mc_new,
mc               2208 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gp100_mc_new,
mc               2244 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gp100_mc_new,
mc               2280 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gp100_mc_new,
mc               2316 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gp100_mc_new,
mc               2352 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gp100_mc_new,
mc               2384 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gp10b_mc_new,
mc               2413 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = gp100_mc_new,
mc               2454 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = tu102_mc_new,
mc               2489 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = tu102_mc_new,
mc               2524 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = tu102_mc_new,
mc               2559 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = tu102_mc_new,
mc               2594 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.mc = tu102_mc_new,
mc               2656 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	_(MC      , device->mc      , &device->mc->subdev);
mc               3162 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 		_(NVKM_SUBDEV_MC      ,       mc);
mc                 32 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	struct nvkm_mc *mc = device->mc;
mc                 33 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	if (likely(mc) && mc->func->unk260)
mc                 34 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 		mc->func->unk260(mc, data);
mc                 40 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	struct nvkm_mc *mc = device->mc;
mc                 42 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	if (likely(mc) && mc->func->intr_mask) {
mc                 44 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 		for (map = mc->func->intr; !mask && map->stat; map++) {
mc                 48 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 		mc->func->intr_mask(mc, mask, en ? mask : 0);
mc                 55 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	struct nvkm_mc *mc = device->mc;
mc                 56 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	if (likely(mc))
mc                 57 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 		mc->func->intr_unarm(mc);
mc                 63 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	struct nvkm_mc *mc = device->mc;
mc                 64 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	if (likely(mc))
mc                 65 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 		mc->func->intr_rearm(mc);
mc                 69 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c nvkm_mc_intr_stat(struct nvkm_mc *mc)
mc                 71 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	u32 intr = mc->func->intr_stat(mc);
mc                 80 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	struct nvkm_mc *mc = device->mc;
mc                 86 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	if (unlikely(!mc))
mc                 89 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	intr = nvkm_mc_intr_stat(mc);
mc                 99 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	for (map = mc->func->intr; map->stat; map++) {
mc                109 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 		nvkm_error(&mc->subdev, "intr %08x\n", stat);
mc                112 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	if (mc->func->intr_hack)
mc                113 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 		mc->func->intr_hack(mc, handled);
mc                120 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	struct nvkm_mc *mc = device->mc;
mc                123 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	if (likely(mc)) {
mc                125 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 			for (map = mc->func->reset; map && map->stat; map++) {
mc                187 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	struct nvkm_mc *mc = nvkm_mc(subdev);
mc                188 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	if (mc->func->init)
mc                189 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 		mc->func->init(mc);
mc                209 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	     int index, struct nvkm_mc *mc)
mc                211 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	nvkm_subdev_ctor(&nvkm_mc, device, index, &mc->subdev);
mc                212 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	mc->func = func;
mc                219 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	struct nvkm_mc *mc;
mc                220 drivers/gpu/drm/nouveau/nvkm/subdev/mc/base.c 	if (!(mc = *pmc = kzalloc(sizeof(*mc), GFP_KERNEL)))
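The nvkm/subdev/mc/base.c hits above show a table-driven interrupt handler: read the raw status word, walk a zero-terminated map of per-unit bits, and warn about anything left over. A minimal, self-contained C sketch of that dispatch pattern follows; the unit names and mask values are invented for illustration and are not nouveau's.

/*
 * Simplified model of the dispatch loop seen in nvkm/subdev/mc/base.c:
 * read a status word, walk a zero-terminated {stat, name} map, then
 * report any bits nothing claimed (the "intr %08x" error path above).
 */
#include <stdio.h>
#include <stdint.h>

struct intr_map { uint32_t stat; const char *unit; };

static const struct intr_map demo_intr[] = {
	{ 0x00000001, "gr"   },	/* made-up units and bits */
	{ 0x00000100, "fifo" },
	{ 0x00100000, "disp" },
	{ 0, NULL }		/* map->stat == 0 terminates the walk */
};

static void dispatch(uint32_t intr)
{
	uint32_t handled = 0;
	const struct intr_map *map;

	for (map = demo_intr; map->stat; map++) {
		if (intr & map->stat) {
			printf("unit %s: stat %08x\n", map->unit, map->stat);
			handled |= map->stat;
		}
	}

	if (intr & ~handled)
		printf("unhandled intr %08x\n", intr & ~handled);
}

int main(void)
{
	dispatch(0x00100101);	/* gr + fifo + disp, all claimed */
	dispatch(0x00000010);	/* nothing claims this bit */
	return 0;
}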
mc                 63 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gf100.c gf100_mc_intr_unarm(struct nvkm_mc *mc)
mc                 65 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gf100.c 	struct nvkm_device *device = mc->subdev.device;
mc                 72 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gf100.c gf100_mc_intr_rearm(struct nvkm_mc *mc)
mc                 74 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gf100.c 	struct nvkm_device *device = mc->subdev.device;
mc                 80 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gf100.c gf100_mc_intr_stat(struct nvkm_mc *mc)
mc                 82 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gf100.c 	struct nvkm_device *device = mc->subdev.device;
mc                 89 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gf100.c gf100_mc_intr_mask(struct nvkm_mc *mc, u32 mask, u32 stat)
mc                 91 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gf100.c 	struct nvkm_device *device = mc->subdev.device;
mc                 97 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gf100.c gf100_mc_unk260(struct nvkm_mc *mc, u32 data)
mc                 99 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gf100.c 	nvkm_wr32(mc->subdev.device, 0x000260, data);
mc                 35 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c gp100_mc_intr_update(struct gp100_mc *mc)
mc                 37 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	struct nvkm_device *device = mc->base.subdev.device;
mc                 38 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	u32 mask = mc->intr ? mc->mask : 0, i;
mc                 48 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	struct gp100_mc *mc = gp100_mc(base);
mc                 50 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	spin_lock_irqsave(&mc->lock, flags);
mc                 51 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	mc->intr = false;
mc                 52 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	gp100_mc_intr_update(mc);
mc                 53 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	spin_unlock_irqrestore(&mc->lock, flags);
mc                 59 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	struct gp100_mc *mc = gp100_mc(base);
mc                 61 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	spin_lock_irqsave(&mc->lock, flags);
mc                 62 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	mc->intr = true;
mc                 63 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	gp100_mc_intr_update(mc);
mc                 64 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	spin_unlock_irqrestore(&mc->lock, flags);
mc                 70 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	struct gp100_mc *mc = gp100_mc(base);
mc                 72 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	spin_lock_irqsave(&mc->lock, flags);
mc                 73 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	mc->mask = (mc->mask & ~mask) | intr;
mc                 74 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	gp100_mc_intr_update(mc);
mc                 75 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	spin_unlock_irqrestore(&mc->lock, flags);
mc                111 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	struct gp100_mc *mc;
mc                113 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	if (!(mc = kzalloc(sizeof(*mc), GFP_KERNEL)))
mc                115 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	nvkm_mc_ctor(func, device, index, &mc->base);
mc                116 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	*pmc = &mc->base;
mc                118 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	spin_lock_init(&mc->lock);
mc                119 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	mc->intr = false;
mc                120 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp100.c 	mc->mask = 0x7fffffff;
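The gp100.c hits above cache an armed flag and an enable mask and funnel every change through one update step taken under a spinlock. A minimal sketch of that locking pattern follows; the actual register write is not among the lines listed, so a printf stands in for it.

/*
 * Minimal model of the gp100 pattern: state changes happen only under the
 * lock, and each one ends with a single "apply" step that programs
 * (armed ? mask : 0), as in gp100_mc_intr_update() above.
 */
#include <stdio.h>
#include <stdbool.h>
#include <stdint.h>
#include <pthread.h>

struct demo_mc {
	pthread_mutex_t lock;	/* plays the role of mc->lock */
	bool intr;		/* armed? */
	uint32_t mask;		/* sources enabled while armed */
};

static void demo_intr_update(struct demo_mc *mc)
{
	uint32_t mask = mc->intr ? mc->mask : 0;
	printf("program intr enable %08x\n", mask);	/* stand-in for the MMIO write */
}

static void demo_intr_unarm(struct demo_mc *mc)
{
	pthread_mutex_lock(&mc->lock);
	mc->intr = false;
	demo_intr_update(mc);
	pthread_mutex_unlock(&mc->lock);
}

static void demo_intr_mask(struct demo_mc *mc, uint32_t mask, uint32_t intr)
{
	pthread_mutex_lock(&mc->lock);
	mc->mask = (mc->mask & ~mask) | intr;
	demo_intr_update(mc);
	pthread_mutex_unlock(&mc->lock);
}

int main(void)
{
	struct demo_mc mc = { PTHREAD_MUTEX_INITIALIZER, true, 0x7fffffff };
	demo_intr_mask(&mc, 0x00000100, 0);	/* turn one source off */
	demo_intr_unarm(&mc);			/* disarm everything */
	return 0;
}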
mc                 27 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp10b.c gp10b_mc_init(struct nvkm_mc *mc)
mc                 29 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gp10b.c 	struct nvkm_device *device = mc->subdev.device;
mc                 57 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gt215.c gt215_mc_intr_mask(struct nvkm_mc *mc, u32 mask, u32 stat)
mc                 59 drivers/gpu/drm/nouveau/nvkm/subdev/mc/gt215.c 	nvkm_mask(mc->subdev.device, 0x000640, mask, stat);
mc                 44 drivers/gpu/drm/nouveau/nvkm/subdev/mc/nv04.c nv04_mc_intr_unarm(struct nvkm_mc *mc)
mc                 46 drivers/gpu/drm/nouveau/nvkm/subdev/mc/nv04.c 	struct nvkm_device *device = mc->subdev.device;
mc                 52 drivers/gpu/drm/nouveau/nvkm/subdev/mc/nv04.c nv04_mc_intr_rearm(struct nvkm_mc *mc)
mc                 54 drivers/gpu/drm/nouveau/nvkm/subdev/mc/nv04.c 	struct nvkm_device *device = mc->subdev.device;
mc                 59 drivers/gpu/drm/nouveau/nvkm/subdev/mc/nv04.c nv04_mc_intr_stat(struct nvkm_mc *mc)
mc                 61 drivers/gpu/drm/nouveau/nvkm/subdev/mc/nv04.c 	return nvkm_rd32(mc->subdev.device, 0x000100);
mc                 65 drivers/gpu/drm/nouveau/nvkm/subdev/mc/nv04.c nv04_mc_init(struct nvkm_mc *mc)
mc                 67 drivers/gpu/drm/nouveau/nvkm/subdev/mc/nv04.c 	struct nvkm_device *device = mc->subdev.device;
mc                 27 drivers/gpu/drm/nouveau/nvkm/subdev/mc/nv44.c nv44_mc_init(struct nvkm_mc *mc)
mc                 29 drivers/gpu/drm/nouveau/nvkm/subdev/mc/nv44.c 	struct nvkm_device *device = mc->subdev.device;
mc                 41 drivers/gpu/drm/nouveau/nvkm/subdev/mc/nv50.c nv50_mc_init(struct nvkm_mc *mc)
mc                 43 drivers/gpu/drm/nouveau/nvkm/subdev/mc/nv50.c 	struct nvkm_device *device = mc->subdev.device;
mc                 25 drivers/gpu/drm/nouveau/nvkm/subdev/mc/tu102.c tu102_mc_intr_hack(struct nvkm_mc *mc, bool *handled)
mc                 27 drivers/gpu/drm/nouveau/nvkm/subdev/mc/tu102.c 	struct nvkm_device *device = mc->subdev.device;
mc                 45 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm20b.c 	void __iomem *mc;
mc                 48 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm20b.c 	mc = ioremap(mc_base, 0xd00);
mc                 49 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm20b.c 	if (!mc) {
mc                 53 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm20b.c 	sb->wpr_addr = ioread32_native(mc + MC_SECURITY_CARVEOUT2_BOM_0) |
mc                 54 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm20b.c 	      ((u64)ioread32_native(mc + MC_SECURITY_CARVEOUT2_BOM_HI_0) << 32);
mc                 55 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm20b.c 	sb->wpr_size = ioread32_native(mc + MC_SECURITY_CARVEOUT2_SIZE_128K)
mc                 57 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm20b.c 	cfg = ioread32_native(mc + MC_SECURITY_CARVEOUT2_CFG0);
mc                 58 drivers/gpu/drm/nouveau/nvkm/subdev/secboot/gm20b.c 	iounmap(mc);
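The gm20b.c lines above assemble the WPR carveout base from a low/high 32-bit register pair read through an ioremap()ed window. A small sketch of that combine follows; the 128 KiB size granularity is inferred from the _SIZE_128K register name (the shift itself is not among the lines listed), and plain variables stand in for the MMIO reads.

#include <stdio.h>
#include <stdint.h>

int main(void)
{
	uint32_t bom_lo = 0x80100000u;	/* example register values, not real ones */
	uint32_t bom_hi = 0x00000001u;
	uint32_t size_128k = 0x00000800u;	/* 0x800 * 128 KiB = 256 MiB */

	uint64_t wpr_addr = bom_lo | ((uint64_t)bom_hi << 32);
	uint64_t wpr_size = (uint64_t)size_128k << 17;	/* 128 KiB units (assumed) */

	printf("WPR at 0x%llx, size 0x%llx\n",
	       (unsigned long long)wpr_addr, (unsigned long long)wpr_size);
	return 0;
}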
mc                134 drivers/gpu/drm/radeon/cik.c extern void si_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
mc               3883 drivers/gpu/drm/radeon/cik.c 			radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
mc               4122 drivers/gpu/drm/radeon/cik.c 		radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
mc               5309 drivers/gpu/drm/radeon/cik.c 	       rdev->mc.vram_start >> 12);
mc               5311 drivers/gpu/drm/radeon/cik.c 	       rdev->mc.vram_end >> 12);
mc               5314 drivers/gpu/drm/radeon/cik.c 	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
mc               5315 drivers/gpu/drm/radeon/cik.c 	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
mc               5318 drivers/gpu/drm/radeon/cik.c 	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
mc               5348 drivers/gpu/drm/radeon/cik.c 	rdev->mc.vram_is_ddr = true;
mc               5386 drivers/gpu/drm/radeon/cik.c 	rdev->mc.vram_width = numchan * chansize;
mc               5388 drivers/gpu/drm/radeon/cik.c 	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
mc               5389 drivers/gpu/drm/radeon/cik.c 	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
mc               5391 drivers/gpu/drm/radeon/cik.c 	rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
mc               5392 drivers/gpu/drm/radeon/cik.c 	rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
mc               5393 drivers/gpu/drm/radeon/cik.c 	rdev->mc.visible_vram_size = rdev->mc.aper_size;
mc               5394 drivers/gpu/drm/radeon/cik.c 	si_vram_gtt_location(rdev, &rdev->mc);
mc               5464 drivers/gpu/drm/radeon/cik.c 	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
mc               5465 drivers/gpu/drm/radeon/cik.c 	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
mc               5537 drivers/gpu/drm/radeon/cik.c 		 (unsigned)(rdev->mc.gtt_size >> 20),
mc                257 drivers/gpu/drm/radeon/cik_sdma.c 		radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
mc                435 drivers/gpu/drm/radeon/cik_sdma.c 		radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
mc               2438 drivers/gpu/drm/radeon/evergreen.c 	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
mc               2439 drivers/gpu/drm/radeon/evergreen.c 	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
mc               2449 drivers/gpu/drm/radeon/evergreen.c 		 (unsigned)(rdev->mc.gtt_size >> 20),
mc               2770 drivers/gpu/drm/radeon/evergreen.c 		       upper_32_bits(rdev->mc.vram_start));
mc               2772 drivers/gpu/drm/radeon/evergreen.c 		       upper_32_bits(rdev->mc.vram_start));
mc               2774 drivers/gpu/drm/radeon/evergreen.c 		       (u32)rdev->mc.vram_start);
mc               2776 drivers/gpu/drm/radeon/evergreen.c 		       (u32)rdev->mc.vram_start);
mc               2780 drivers/gpu/drm/radeon/evergreen.c 		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
mc               2781 drivers/gpu/drm/radeon/evergreen.c 		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
mc               2874 drivers/gpu/drm/radeon/evergreen.c 		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
mc               2877 drivers/gpu/drm/radeon/evergreen.c 				rdev->mc.vram_start >> 12);
mc               2879 drivers/gpu/drm/radeon/evergreen.c 				rdev->mc.gtt_end >> 12);
mc               2883 drivers/gpu/drm/radeon/evergreen.c 				rdev->mc.gtt_start >> 12);
mc               2885 drivers/gpu/drm/radeon/evergreen.c 				rdev->mc.vram_end >> 12);
mc               2889 drivers/gpu/drm/radeon/evergreen.c 			rdev->mc.vram_start >> 12);
mc               2891 drivers/gpu/drm/radeon/evergreen.c 			rdev->mc.vram_end >> 12);
mc               2899 drivers/gpu/drm/radeon/evergreen.c 		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
mc               2900 drivers/gpu/drm/radeon/evergreen.c 		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
mc               2903 drivers/gpu/drm/radeon/evergreen.c 	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
mc               2904 drivers/gpu/drm/radeon/evergreen.c 	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
mc               2906 drivers/gpu/drm/radeon/evergreen.c 	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
mc               2910 drivers/gpu/drm/radeon/evergreen.c 		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
mc               2911 drivers/gpu/drm/radeon/evergreen.c 		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
mc               2912 drivers/gpu/drm/radeon/evergreen.c 		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
mc               3716 drivers/gpu/drm/radeon/evergreen.c 	rdev->mc.vram_is_ddr = true;
mc               3746 drivers/gpu/drm/radeon/evergreen.c 	rdev->mc.vram_width = numchan * chansize;
mc               3748 drivers/gpu/drm/radeon/evergreen.c 	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
mc               3749 drivers/gpu/drm/radeon/evergreen.c 	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
mc               3755 drivers/gpu/drm/radeon/evergreen.c 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
mc               3756 drivers/gpu/drm/radeon/evergreen.c 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
mc               3759 drivers/gpu/drm/radeon/evergreen.c 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
mc               3760 drivers/gpu/drm/radeon/evergreen.c 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
mc               3762 drivers/gpu/drm/radeon/evergreen.c 	rdev->mc.visible_vram_size = rdev->mc.aper_size;
mc               3763 drivers/gpu/drm/radeon/evergreen.c 	r700_vram_gtt_location(rdev, &rdev->mc);
mc               1305 drivers/gpu/drm/radeon/ni.c 	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
mc               1306 drivers/gpu/drm/radeon/ni.c 	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
mc               1352 drivers/gpu/drm/radeon/ni.c 		 (unsigned)(rdev->mc.gtt_size >> 20),
mc               1467 drivers/gpu/drm/radeon/ni.c 			radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
mc               1741 drivers/gpu/drm/radeon/ni.c 		radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
mc                163 drivers/gpu/drm/radeon/ni_dma.c 		radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
mc                259 drivers/gpu/drm/radeon/ni_dma.c 		radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
mc                668 drivers/gpu/drm/radeon/r100.c 	WREG32(RADEON_AIC_LO_ADDR, rdev->mc.gtt_start);
mc                669 drivers/gpu/drm/radeon/r100.c 	WREG32(RADEON_AIC_HI_ADDR, rdev->mc.gtt_end);
mc                676 drivers/gpu/drm/radeon/r100.c 		 (unsigned)(rdev->mc.gtt_size >> 20),
mc               1218 drivers/gpu/drm/radeon/r100.c 	radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
mc               1246 drivers/gpu/drm/radeon/r100.c 	radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
mc               2703 drivers/gpu/drm/radeon/r100.c 	rdev->mc.vram_is_ddr = false;
mc               2705 drivers/gpu/drm/radeon/r100.c 		rdev->mc.vram_is_ddr = true;
mc               2707 drivers/gpu/drm/radeon/r100.c 		rdev->mc.vram_is_ddr = true;
mc               2713 drivers/gpu/drm/radeon/r100.c 			rdev->mc.vram_width = 32;
mc               2715 drivers/gpu/drm/radeon/r100.c 			rdev->mc.vram_width = 64;
mc               2718 drivers/gpu/drm/radeon/r100.c 			rdev->mc.vram_width /= 4;
mc               2719 drivers/gpu/drm/radeon/r100.c 			rdev->mc.vram_is_ddr = true;
mc               2724 drivers/gpu/drm/radeon/r100.c 			rdev->mc.vram_width = 128;
mc               2726 drivers/gpu/drm/radeon/r100.c 			rdev->mc.vram_width = 64;
mc               2730 drivers/gpu/drm/radeon/r100.c 		rdev->mc.vram_width = 128;
mc               2777 drivers/gpu/drm/radeon/r100.c 	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
mc               2778 drivers/gpu/drm/radeon/r100.c 	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
mc               2779 drivers/gpu/drm/radeon/r100.c 	rdev->mc.visible_vram_size = r100_get_accessible_vram(rdev);
mc               2781 drivers/gpu/drm/radeon/r100.c 	if (rdev->mc.visible_vram_size > rdev->mc.aper_size)
mc               2782 drivers/gpu/drm/radeon/r100.c 		rdev->mc.visible_vram_size = rdev->mc.aper_size;
mc               2788 drivers/gpu/drm/radeon/r100.c 		rdev->mc.real_vram_size = (((tom >> 16) - (tom & 0xffff) + 1) << 16);
mc               2789 drivers/gpu/drm/radeon/r100.c 		WREG32(RADEON_CONFIG_MEMSIZE, rdev->mc.real_vram_size);
mc               2790 drivers/gpu/drm/radeon/r100.c 		rdev->mc.mc_vram_size = rdev->mc.real_vram_size;
mc               2792 drivers/gpu/drm/radeon/r100.c 		rdev->mc.real_vram_size = RREG32(RADEON_CONFIG_MEMSIZE);
mc               2796 drivers/gpu/drm/radeon/r100.c 		if (rdev->mc.real_vram_size == 0) {
mc               2797 drivers/gpu/drm/radeon/r100.c 			rdev->mc.real_vram_size = 8192 * 1024;
mc               2798 drivers/gpu/drm/radeon/r100.c 			WREG32(RADEON_CONFIG_MEMSIZE, rdev->mc.real_vram_size);
mc               2803 drivers/gpu/drm/radeon/r100.c 		if (rdev->mc.aper_size > config_aper_size)
mc               2804 drivers/gpu/drm/radeon/r100.c 			config_aper_size = rdev->mc.aper_size;
mc               2806 drivers/gpu/drm/radeon/r100.c 		if (config_aper_size > rdev->mc.real_vram_size)
mc               2807 drivers/gpu/drm/radeon/r100.c 			rdev->mc.mc_vram_size = config_aper_size;
mc               2809 drivers/gpu/drm/radeon/r100.c 			rdev->mc.mc_vram_size = rdev->mc.real_vram_size;
mc               2833 drivers/gpu/drm/radeon/r100.c 	base = rdev->mc.aper_base;
mc               2836 drivers/gpu/drm/radeon/r100.c 	radeon_vram_location(rdev, &rdev->mc, base);
mc               2837 drivers/gpu/drm/radeon/r100.c 	rdev->mc.gtt_base_align = 0;
mc               2839 drivers/gpu/drm/radeon/r100.c 		radeon_gtt_location(rdev, &rdev->mc);
mc               3264 drivers/gpu/drm/radeon/r100.c 	temp = (rdev->mc.vram_width / 8) * (rdev->mc.vram_is_ddr ? 2 : 1);
mc               3396 drivers/gpu/drm/radeon/r100.c 			if (rdev->mc.vram_is_ddr)
mc               3401 drivers/gpu/drm/radeon/r100.c 			if (rdev->mc.vram_width == 128)
mc               3410 drivers/gpu/drm/radeon/r100.c 	if (rdev->mc.vram_is_ddr) {
mc               3411 drivers/gpu/drm/radeon/r100.c 		if (rdev->mc.vram_width == 32) {
mc               3438 drivers/gpu/drm/radeon/r100.c 	temp_ff.full = dfixed_const((2 * (cur_size - (rdev->mc.vram_is_ddr + 1))));
mc               3579 drivers/gpu/drm/radeon/r100.c 			temp = (rdev->mc.vram_width * rdev->mc.vram_is_ddr + 1)/128;
mc               3816 drivers/gpu/drm/radeon/r100.c 	WREG32(R_00023C_DISPLAY_BASE_ADDR, rdev->mc.vram_start);
mc               3818 drivers/gpu/drm/radeon/r100.c 		WREG32(R_00033C_CRTC2_DISPLAY_BASE_ADDR, rdev->mc.vram_start);
mc               3854 drivers/gpu/drm/radeon/r100.c 			S_00014C_MC_AGP_START(rdev->mc.gtt_start >> 16) |
mc               3855 drivers/gpu/drm/radeon/r100.c 			S_00014C_MC_AGP_TOP(rdev->mc.gtt_end >> 16));
mc               3856 drivers/gpu/drm/radeon/r100.c 		WREG32(R_000170_AGP_BASE, lower_32_bits(rdev->mc.agp_base));
mc               3859 drivers/gpu/drm/radeon/r100.c 				upper_32_bits(rdev->mc.agp_base) & 0xff);
mc               3871 drivers/gpu/drm/radeon/r100.c 		S_000148_MC_FB_START(rdev->mc.vram_start >> 16) |
mc               3872 drivers/gpu/drm/radeon/r100.c 		S_000148_MC_FB_TOP(rdev->mc.vram_end >> 16));
mc                169 drivers/gpu/drm/radeon/r300.c 	WREG32_PCIE(RADEON_PCIE_TX_GART_START_LO, rdev->mc.gtt_start);
mc                170 drivers/gpu/drm/radeon/r300.c 	tmp = rdev->mc.gtt_end & ~RADEON_GPU_PAGE_MASK;
mc                177 drivers/gpu/drm/radeon/r300.c 	WREG32_PCIE(RADEON_PCIE_TX_DISCARD_RD_ADDR_LO, rdev->mc.vram_start);
mc                187 drivers/gpu/drm/radeon/r300.c 		 (unsigned)(rdev->mc.gtt_size >> 20),
mc                482 drivers/gpu/drm/radeon/r300.c 	rdev->mc.vram_is_ddr = true;
mc                486 drivers/gpu/drm/radeon/r300.c 	case 0: rdev->mc.vram_width = 64; break;
mc                487 drivers/gpu/drm/radeon/r300.c 	case 1: rdev->mc.vram_width = 128; break;
mc                488 drivers/gpu/drm/radeon/r300.c 	case 2: rdev->mc.vram_width = 256; break;
mc                489 drivers/gpu/drm/radeon/r300.c 	default:  rdev->mc.vram_width = 128; break;
mc                492 drivers/gpu/drm/radeon/r300.c 	base = rdev->mc.aper_base;
mc                495 drivers/gpu/drm/radeon/r300.c 	radeon_vram_location(rdev, &rdev->mc, base);
mc                496 drivers/gpu/drm/radeon/r300.c 	rdev->mc.gtt_base_align = 0;
mc                498 drivers/gpu/drm/radeon/r300.c 		radeon_gtt_location(rdev, &rdev->mc);
mc               1344 drivers/gpu/drm/radeon/r300.c 			S_00014C_MC_AGP_START(rdev->mc.gtt_start >> 16) |
mc               1345 drivers/gpu/drm/radeon/r300.c 			S_00014C_MC_AGP_TOP(rdev->mc.gtt_end >> 16));
mc               1346 drivers/gpu/drm/radeon/r300.c 		WREG32(R_000170_AGP_BASE, lower_32_bits(rdev->mc.agp_base));
mc               1348 drivers/gpu/drm/radeon/r300.c 			upper_32_bits(rdev->mc.agp_base) & 0xff);
mc               1359 drivers/gpu/drm/radeon/r300.c 		S_000148_MC_FB_START(rdev->mc.vram_start >> 16) |
mc               1360 drivers/gpu/drm/radeon/r300.c 		S_000148_MC_FB_TOP(rdev->mc.vram_end >> 16));
mc                 97 drivers/gpu/drm/radeon/r520.c 	rdev->mc.vram_width = 128;
mc                 98 drivers/gpu/drm/radeon/r520.c 	rdev->mc.vram_is_ddr = true;
mc                102 drivers/gpu/drm/radeon/r520.c 		rdev->mc.vram_width = 32;
mc                105 drivers/gpu/drm/radeon/r520.c 		rdev->mc.vram_width = 64;
mc                108 drivers/gpu/drm/radeon/r520.c 		rdev->mc.vram_width = 128;
mc                111 drivers/gpu/drm/radeon/r520.c 		rdev->mc.vram_width = 256;
mc                114 drivers/gpu/drm/radeon/r520.c 		rdev->mc.vram_width = 128;
mc                118 drivers/gpu/drm/radeon/r520.c 		rdev->mc.vram_width *= 2;
mc                126 drivers/gpu/drm/radeon/r520.c 	radeon_vram_location(rdev, &rdev->mc, 0);
mc                127 drivers/gpu/drm/radeon/r520.c 	rdev->mc.gtt_base_align = 0;
mc                129 drivers/gpu/drm/radeon/r520.c 		radeon_gtt_location(rdev, &rdev->mc);
mc                144 drivers/gpu/drm/radeon/r520.c 	WREG32(R_0000F8_CONFIG_MEMSIZE, rdev->mc.real_vram_size);
mc                147 drivers/gpu/drm/radeon/r520.c 			S_000004_MC_FB_START(rdev->mc.vram_start >> 16) |
mc                148 drivers/gpu/drm/radeon/r520.c 			S_000004_MC_FB_TOP(rdev->mc.vram_end >> 16));
mc                150 drivers/gpu/drm/radeon/r520.c 		S_000134_HDP_FB_START(rdev->mc.vram_start >> 16));
mc                153 drivers/gpu/drm/radeon/r520.c 			S_000005_MC_AGP_START(rdev->mc.gtt_start >> 16) |
mc                154 drivers/gpu/drm/radeon/r520.c 			S_000005_MC_AGP_TOP(rdev->mc.gtt_end >> 16));
mc                155 drivers/gpu/drm/radeon/r520.c 		WREG32_MC(R_000006_AGP_BASE, lower_32_bits(rdev->mc.agp_base));
mc                157 drivers/gpu/drm/radeon/r520.c 			S_000007_AGP_BASE_ADDR_2(upper_32_bits(rdev->mc.agp_base)));
mc               1095 drivers/gpu/drm/radeon/r600.c 	WREG32(VM_CONTEXT0_INVALIDATION_LOW_ADDR, rdev->mc.gtt_start >> 12);
mc               1096 drivers/gpu/drm/radeon/r600.c 	WREG32(VM_CONTEXT0_INVALIDATION_HIGH_ADDR, (rdev->mc.gtt_end - 1) >> 12);
mc               1169 drivers/gpu/drm/radeon/r600.c 	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
mc               1170 drivers/gpu/drm/radeon/r600.c 	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
mc               1181 drivers/gpu/drm/radeon/r600.c 		 (unsigned)(rdev->mc.gtt_size >> 20),
mc               1327 drivers/gpu/drm/radeon/r600.c 		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
mc               1330 drivers/gpu/drm/radeon/r600.c 				rdev->mc.vram_start >> 12);
mc               1332 drivers/gpu/drm/radeon/r600.c 				rdev->mc.gtt_end >> 12);
mc               1336 drivers/gpu/drm/radeon/r600.c 				rdev->mc.gtt_start >> 12);
mc               1338 drivers/gpu/drm/radeon/r600.c 				rdev->mc.vram_end >> 12);
mc               1341 drivers/gpu/drm/radeon/r600.c 		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR, rdev->mc.vram_start >> 12);
mc               1342 drivers/gpu/drm/radeon/r600.c 		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR, rdev->mc.vram_end >> 12);
mc               1345 drivers/gpu/drm/radeon/r600.c 	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
mc               1346 drivers/gpu/drm/radeon/r600.c 	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
mc               1348 drivers/gpu/drm/radeon/r600.c 	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
mc               1352 drivers/gpu/drm/radeon/r600.c 		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 22);
mc               1353 drivers/gpu/drm/radeon/r600.c 		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 22);
mc               1354 drivers/gpu/drm/radeon/r600.c 		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
mc               1390 drivers/gpu/drm/radeon/r600.c static void r600_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc)
mc               1394 drivers/gpu/drm/radeon/r600.c 	if (mc->mc_vram_size > 0xE0000000) {
mc               1397 drivers/gpu/drm/radeon/r600.c 		mc->real_vram_size = 0xE0000000;
mc               1398 drivers/gpu/drm/radeon/r600.c 		mc->mc_vram_size = 0xE0000000;
mc               1401 drivers/gpu/drm/radeon/r600.c 		size_bf = mc->gtt_start;
mc               1402 drivers/gpu/drm/radeon/r600.c 		size_af = mc->mc_mask - mc->gtt_end;
mc               1404 drivers/gpu/drm/radeon/r600.c 			if (mc->mc_vram_size > size_bf) {
mc               1406 drivers/gpu/drm/radeon/r600.c 				mc->real_vram_size = size_bf;
mc               1407 drivers/gpu/drm/radeon/r600.c 				mc->mc_vram_size = size_bf;
mc               1409 drivers/gpu/drm/radeon/r600.c 			mc->vram_start = mc->gtt_start - mc->mc_vram_size;
mc               1411 drivers/gpu/drm/radeon/r600.c 			if (mc->mc_vram_size > size_af) {
mc               1413 drivers/gpu/drm/radeon/r600.c 				mc->real_vram_size = size_af;
mc               1414 drivers/gpu/drm/radeon/r600.c 				mc->mc_vram_size = size_af;
mc               1416 drivers/gpu/drm/radeon/r600.c 			mc->vram_start = mc->gtt_end + 1;
mc               1418 drivers/gpu/drm/radeon/r600.c 		mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;
mc               1420 drivers/gpu/drm/radeon/r600.c 				mc->mc_vram_size >> 20, mc->vram_start,
mc               1421 drivers/gpu/drm/radeon/r600.c 				mc->vram_end, mc->real_vram_size >> 20);
mc               1428 drivers/gpu/drm/radeon/r600.c 		radeon_vram_location(rdev, &rdev->mc, base);
mc               1429 drivers/gpu/drm/radeon/r600.c 		rdev->mc.gtt_base_align = 0;
mc               1430 drivers/gpu/drm/radeon/r600.c 		radeon_gtt_location(rdev, mc);
mc               1442 drivers/gpu/drm/radeon/r600.c 	rdev->mc.vram_is_ddr = true;
mc               1467 drivers/gpu/drm/radeon/r600.c 	rdev->mc.vram_width = numchan * chansize;
mc               1469 drivers/gpu/drm/radeon/r600.c 	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
mc               1470 drivers/gpu/drm/radeon/r600.c 	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
mc               1472 drivers/gpu/drm/radeon/r600.c 	rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
mc               1473 drivers/gpu/drm/radeon/r600.c 	rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
mc               1474 drivers/gpu/drm/radeon/r600.c 	rdev->mc.visible_vram_size = rdev->mc.aper_size;
mc               1475 drivers/gpu/drm/radeon/r600.c 	r600_vram_gtt_location(rdev, &rdev->mc);
mc               1479 drivers/gpu/drm/radeon/r600.c 		rdev->mc.igp_sideport_enabled = radeon_atombios_sideport_present(rdev);
mc               1488 drivers/gpu/drm/radeon/r600.c 			if (k8_addr + rdev->mc.visible_vram_size < 0x100000000ULL)
mc               1494 drivers/gpu/drm/radeon/r600.c 				if (rdev->mc.igp_sideport_enabled == false && radeon_fastfb == 1) {
mc               1496 drivers/gpu/drm/radeon/r600.c 						(unsigned long long)rdev->mc.aper_base, k8_addr);
mc               1497 drivers/gpu/drm/radeon/r600.c 					rdev->mc.aper_base = (resource_size_t)k8_addr;
mc               2427 drivers/gpu/drm/radeon/r600.c 		radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
mc               2774 drivers/gpu/drm/radeon/r600.c 		radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
mc                104 drivers/gpu/drm/radeon/r600_dma.c 		radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
mc                181 drivers/gpu/drm/radeon/r600_dma.c 		radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
mc               2366 drivers/gpu/drm/radeon/radeon.h 	struct radeon_mc		mc;
mc               2817 drivers/gpu/drm/radeon/radeon.h extern void radeon_vram_location(struct radeon_device *rdev, struct radeon_mc *mc, u64 base);
mc               2818 drivers/gpu/drm/radeon/radeon.h extern void radeon_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
mc                248 drivers/gpu/drm/radeon/radeon_agp.c 	rdev->mc.agp_base = rdev->ddev->agp->agp_info.aper_base;
mc                249 drivers/gpu/drm/radeon/radeon_agp.c 	rdev->mc.gtt_size = rdev->ddev->agp->agp_info.aper_size << 20;
mc                250 drivers/gpu/drm/radeon/radeon_agp.c 	rdev->mc.gtt_start = rdev->mc.agp_base;
mc                251 drivers/gpu/drm/radeon/radeon_agp.c 	rdev->mc.gtt_end = rdev->mc.gtt_start + rdev->mc.gtt_size - 1;
mc                253 drivers/gpu/drm/radeon/radeon_agp.c 		rdev->mc.gtt_size >> 20, rdev->mc.gtt_start, rdev->mc.gtt_end);
mc                177 drivers/gpu/drm/radeon/radeon_asic.c 	rdev->mc.gtt_size = radeon_gart_size * 1024 * 1024;
mc                470 drivers/gpu/drm/radeon/radeon_asic.h void r700_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
mc                620 drivers/gpu/drm/radeon/radeon_clocks.c 					if (rdev->mc.vram_width == 64) {
mc                566 drivers/gpu/drm/radeon/radeon_device.c void radeon_vram_location(struct radeon_device *rdev, struct radeon_mc *mc, u64 base)
mc                570 drivers/gpu/drm/radeon/radeon_device.c 	mc->vram_start = base;
mc                571 drivers/gpu/drm/radeon/radeon_device.c 	if (mc->mc_vram_size > (rdev->mc.mc_mask - base + 1)) {
mc                573 drivers/gpu/drm/radeon/radeon_device.c 		mc->real_vram_size = mc->aper_size;
mc                574 drivers/gpu/drm/radeon/radeon_device.c 		mc->mc_vram_size = mc->aper_size;
mc                576 drivers/gpu/drm/radeon/radeon_device.c 	mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;
mc                577 drivers/gpu/drm/radeon/radeon_device.c 	if (rdev->flags & RADEON_IS_AGP && mc->vram_end > mc->gtt_start && mc->vram_start <= mc->gtt_end) {
mc                579 drivers/gpu/drm/radeon/radeon_device.c 		mc->real_vram_size = mc->aper_size;
mc                580 drivers/gpu/drm/radeon/radeon_device.c 		mc->mc_vram_size = mc->aper_size;
mc                582 drivers/gpu/drm/radeon/radeon_device.c 	mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;
mc                583 drivers/gpu/drm/radeon/radeon_device.c 	if (limit && limit < mc->real_vram_size)
mc                584 drivers/gpu/drm/radeon/radeon_device.c 		mc->real_vram_size = limit;
mc                586 drivers/gpu/drm/radeon/radeon_device.c 			mc->mc_vram_size >> 20, mc->vram_start,
mc                587 drivers/gpu/drm/radeon/radeon_device.c 			mc->vram_end, mc->real_vram_size >> 20);
mc                602 drivers/gpu/drm/radeon/radeon_device.c void radeon_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc)
mc                606 drivers/gpu/drm/radeon/radeon_device.c 	size_af = ((rdev->mc.mc_mask - mc->vram_end) + mc->gtt_base_align) & ~mc->gtt_base_align;
mc                607 drivers/gpu/drm/radeon/radeon_device.c 	size_bf = mc->vram_start & ~mc->gtt_base_align;
mc                609 drivers/gpu/drm/radeon/radeon_device.c 		if (mc->gtt_size > size_bf) {
mc                611 drivers/gpu/drm/radeon/radeon_device.c 			mc->gtt_size = size_bf;
mc                613 drivers/gpu/drm/radeon/radeon_device.c 		mc->gtt_start = (mc->vram_start & ~mc->gtt_base_align) - mc->gtt_size;
mc                615 drivers/gpu/drm/radeon/radeon_device.c 		if (mc->gtt_size > size_af) {
mc                617 drivers/gpu/drm/radeon/radeon_device.c 			mc->gtt_size = size_af;
mc                619 drivers/gpu/drm/radeon/radeon_device.c 		mc->gtt_start = (mc->vram_end + 1 + mc->gtt_base_align) & ~mc->gtt_base_align;
mc                621 drivers/gpu/drm/radeon/radeon_device.c 	mc->gtt_end = mc->gtt_start + mc->gtt_size - 1;
mc                623 drivers/gpu/drm/radeon/radeon_device.c 			mc->gtt_size >> 20, mc->gtt_start, mc->gtt_end);
mc               1146 drivers/gpu/drm/radeon/radeon_device.c 	rdev->mc.gtt_size = (uint64_t)radeon_gart_size << 20;
mc               1304 drivers/gpu/drm/radeon/radeon_device.c 	rdev->mc.gtt_size = 512 * 1024 * 1024;
mc               1360 drivers/gpu/drm/radeon/radeon_device.c 		rdev->mc.mc_mask = 0xffffffffffULL; /* 40 bit MC */
mc               1362 drivers/gpu/drm/radeon/radeon_device.c 		rdev->mc.mc_mask = 0xfffffffffULL; /* 36 bit MC */
mc               1364 drivers/gpu/drm/radeon/radeon_device.c 		rdev->mc.mc_mask = 0xffffffffULL; /* 32 bit MC */
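The radeon_device.c hits above place the GTT on one side of the VRAM aperture inside the MC address mask (32/36/40-bit per the mc_mask lines), shrinking gtt_size if it does not fit. A self-contained sketch of that placement follows; the comparison that picks the roomier side falls between the lines listed, so it is inferred rather than quoted.

/*
 * Model of the GTT placement seen in radeon_gtt_location(): measure the
 * space before and after VRAM, pick the larger side (inferred), clamp the
 * GTT size to it, and align the start with gtt_base_align.
 */
#include <stdio.h>
#include <stdint.h>

struct demo_mc {
	uint64_t mc_mask;
	uint64_t vram_start, vram_end;
	uint64_t gtt_base_align;	/* size-minus-one style mask, as in the hits */
	uint64_t gtt_size;
	uint64_t gtt_start, gtt_end;
};

static void demo_gtt_location(struct demo_mc *mc)
{
	uint64_t size_af, size_bf;

	size_af = ((mc->mc_mask - mc->vram_end) + mc->gtt_base_align) & ~mc->gtt_base_align;
	size_bf = mc->vram_start & ~mc->gtt_base_align;
	if (size_bf > size_af) {
		if (mc->gtt_size > size_bf)
			mc->gtt_size = size_bf;
		mc->gtt_start = (mc->vram_start & ~mc->gtt_base_align) - mc->gtt_size;
	} else {
		if (mc->gtt_size > size_af)
			mc->gtt_size = size_af;
		mc->gtt_start = (mc->vram_end + 1 + mc->gtt_base_align) & ~mc->gtt_base_align;
	}
	mc->gtt_end = mc->gtt_start + mc->gtt_size - 1;
}

int main(void)
{
	struct demo_mc mc = {
		.mc_mask = 0xffffffffULL,		/* 32-bit MC, as on older parts */
		.vram_start = 0,
		.vram_end = 256ULL * 1024 * 1024 - 1,
		.gtt_base_align = 0,
		.gtt_size = 512ULL * 1024 * 1024,
	};
	demo_gtt_location(&mc);
	printf("GTT %lluM at 0x%llx-0x%llx\n",
	       (unsigned long long)(mc.gtt_size >> 20),
	       (unsigned long long)mc.gtt_start,
	       (unsigned long long)mc.gtt_end);
	return 0;
}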
mc               1604 drivers/gpu/drm/radeon/radeon_display.c 	rdev->ddev->mode_config.fb_base = rdev->mc.aper_base;
mc                268 drivers/gpu/drm/radeon/radeon_fb.c 	tmp = radeon_bo_gpu_offset(rbo) - rdev->mc.vram_start;
mc                269 drivers/gpu/drm/radeon/radeon_fb.c 	info->fix.smem_start = rdev->mc.aper_base + tmp;
mc                278 drivers/gpu/drm/radeon/radeon_fb.c 	info->apertures->ranges[0].size = rdev->mc.aper_size;
mc                288 drivers/gpu/drm/radeon/radeon_fb.c 	DRM_INFO("vram apper at 0x%lX\n",  (unsigned long)rdev->mc.aper_base);
mc                341 drivers/gpu/drm/radeon/radeon_fb.c 	if (rdev->mc.real_vram_size <= (8*1024*1024))
mc                344 drivers/gpu/drm/radeon/radeon_fb.c 		 rdev->mc.real_vram_size <= (32*1024*1024))
mc                348 drivers/gpu/drm/radeon/radeon_gart.c 	rdev->gart.num_cpu_pages = rdev->mc.gtt_size / PAGE_SIZE;
mc                349 drivers/gpu/drm/radeon/radeon_gart.c 	rdev->gart.num_gpu_pages = rdev->mc.gtt_size / RADEON_GPU_PAGE_SIZE;
mc                 65 drivers/gpu/drm/radeon/radeon_gem.c 	max_size = rdev->mc.gtt_size - rdev->gart_pin_size;
mc                231 drivers/gpu/drm/radeon/radeon_gem.c 	args->vram_visible = rdev->mc.visible_vram_size;
mc                233 drivers/gpu/drm/radeon/radeon_gem.c 	args->gart_size = rdev->mc.gtt_size;
mc                471 drivers/gpu/drm/radeon/radeon_legacy_crtc.c 	radeon_crtc->legacy_display_base_addr = rdev->mc.vram_start;
mc                112 drivers/gpu/drm/radeon/radeon_object.c 		    rbo->rdev->mc.visible_vram_size < rbo->rdev->mc.real_vram_size) {
mc                114 drivers/gpu/drm/radeon/radeon_object.c 				rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT;
mc                177 drivers/gpu/drm/radeon/radeon_object.c 				rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT;
mc                346 drivers/gpu/drm/radeon/radeon_object.c 				domain_start = bo->rdev->mc.vram_start;
mc                348 drivers/gpu/drm/radeon/radeon_object.c 				domain_start = bo->rdev->mc.gtt_start;
mc                365 drivers/gpu/drm/radeon/radeon_object.c 		    (!max_offset || max_offset > bo->rdev->mc.visible_vram_size))
mc                367 drivers/gpu/drm/radeon/radeon_object.c 				bo->rdev->mc.visible_vram_size >> PAGE_SHIFT;
mc                427 drivers/gpu/drm/radeon/radeon_object.c 		if (rdev->mc.igp_sideport_enabled == false)
mc                458 drivers/gpu/drm/radeon/radeon_object.c 	arch_io_reserve_memtype_wc(rdev->mc.aper_base,
mc                459 drivers/gpu/drm/radeon/radeon_object.c 				   rdev->mc.aper_size);
mc                463 drivers/gpu/drm/radeon/radeon_object.c 		rdev->mc.vram_mtrr = arch_phys_wc_add(rdev->mc.aper_base,
mc                464 drivers/gpu/drm/radeon/radeon_object.c 						      rdev->mc.aper_size);
mc                467 drivers/gpu/drm/radeon/radeon_object.c 		rdev->mc.mc_vram_size >> 20,
mc                468 drivers/gpu/drm/radeon/radeon_object.c 		(unsigned long long)rdev->mc.aper_size >> 20);
mc                470 drivers/gpu/drm/radeon/radeon_object.c 			rdev->mc.vram_width, rdev->mc.vram_is_ddr ? 'D' : 'S');
mc                477 drivers/gpu/drm/radeon/radeon_object.c 	arch_phys_wc_del(rdev->mc.vram_mtrr);
mc                478 drivers/gpu/drm/radeon/radeon_object.c 	arch_io_free_memtype_wc(rdev->mc.aper_base, rdev->mc.aper_size);
mc                485 drivers/gpu/drm/radeon/radeon_object.c 	u64 real_vram_size = rdev->mc.real_vram_size;
mc                815 drivers/gpu/drm/radeon/radeon_object.c 	if ((offset + size) <= rdev->mc.visible_vram_size)
mc                824 drivers/gpu/drm/radeon/radeon_object.c 	lpfn =	rdev->mc.visible_vram_size >> PAGE_SHIFT;
mc                841 drivers/gpu/drm/radeon/radeon_object.c 	if ((offset + size) > rdev->mc.visible_vram_size)
mc                 60 drivers/gpu/drm/radeon/radeon_test.c 	n = rdev->mc.gtt_size - rdev->gart_pin_size;
mc                158 drivers/gpu/drm/radeon/radeon_test.c 					  (gtt_addr - rdev->mc.gtt_start +
mc                161 drivers/gpu/drm/radeon/radeon_test.c 					  (vram_addr - rdev->mc.vram_start +
mc                209 drivers/gpu/drm/radeon/radeon_test.c 					  (vram_addr - rdev->mc.vram_start +
mc                212 drivers/gpu/drm/radeon/radeon_test.c 					  (gtt_addr - rdev->mc.gtt_start +
mc                222 drivers/gpu/drm/radeon/radeon_test.c 			 gtt_addr - rdev->mc.gtt_start);
mc                 90 drivers/gpu/drm/radeon/radeon_ttm.c 		man->gpu_offset = rdev->mc.gtt_start;
mc                112 drivers/gpu/drm/radeon/radeon_ttm.c 		man->gpu_offset = rdev->mc.vram_start;
mc                148 drivers/gpu/drm/radeon/radeon_ttm.c 		else if (rbo->rdev->mc.visible_vram_size < rbo->rdev->mc.real_vram_size &&
mc                149 drivers/gpu/drm/radeon/radeon_ttm.c 			 bo->mem.start < (rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT)) {
mc                150 drivers/gpu/drm/radeon/radeon_ttm.c 			unsigned fpfn = rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT;
mc                219 drivers/gpu/drm/radeon/radeon_ttm.c 		old_start += rdev->mc.vram_start;
mc                222 drivers/gpu/drm/radeon/radeon_ttm.c 		old_start += rdev->mc.gtt_start;
mc                230 drivers/gpu/drm/radeon/radeon_ttm.c 		new_start += rdev->mc.vram_start;
mc                233 drivers/gpu/drm/radeon/radeon_ttm.c 		new_start += rdev->mc.gtt_start;
mc                423 drivers/gpu/drm/radeon/radeon_ttm.c 			mem->bus.base = rdev->mc.agp_base;
mc                431 drivers/gpu/drm/radeon/radeon_ttm.c 		if ((mem->bus.offset + mem->bus.size) > rdev->mc.visible_vram_size)
mc                433 drivers/gpu/drm/radeon/radeon_ttm.c 		mem->bus.base = rdev->mc.aper_base;
mc                804 drivers/gpu/drm/radeon/radeon_ttm.c 				rdev->mc.real_vram_size >> PAGE_SHIFT);
mc                810 drivers/gpu/drm/radeon/radeon_ttm.c 	radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
mc                828 drivers/gpu/drm/radeon/radeon_ttm.c 		 (unsigned) (rdev->mc.real_vram_size / (1024 * 1024)));
mc                830 drivers/gpu/drm/radeon/radeon_ttm.c 				rdev->mc.gtt_size >> PAGE_SHIFT);
mc                836 drivers/gpu/drm/radeon/radeon_ttm.c 		 (unsigned)(rdev->mc.gtt_size / (1024 * 1024)));
mc                956 drivers/gpu/drm/radeon/radeon_ttm.c 	i_size_write(inode, rdev->mc.mc_vram_size);
mc                975 drivers/gpu/drm/radeon/radeon_ttm.c 		if (*pos >= rdev->mc.mc_vram_size)
mc               1008 drivers/gpu/drm/radeon/radeon_ttm.c 	i_size_write(inode, rdev->mc.gtt_size);
mc                212 drivers/gpu/drm/radeon/radeon_ucode.h 	struct mc_firmware_header_v1_0 mc;
mc                 46 drivers/gpu/drm/radeon/rs400.c 	switch (rdev->mc.gtt_size/(1024*1024)) {
mc                 57 drivers/gpu/drm/radeon/rs400.c 			  (unsigned)(rdev->mc.gtt_size >> 20));
mc                 60 drivers/gpu/drm/radeon/rs400.c 		rdev->mc.gtt_size = 32 * 1024 * 1024;
mc                 90 drivers/gpu/drm/radeon/rs400.c 	switch(rdev->mc.gtt_size / (1024 * 1024)) {
mc                121 drivers/gpu/drm/radeon/rs400.c 	switch(rdev->mc.gtt_size / (1024 * 1024)) {
mc                154 drivers/gpu/drm/radeon/rs400.c 	tmp = REG_SET(RS690_MC_AGP_TOP, rdev->mc.gtt_end >> 16);
mc                155 drivers/gpu/drm/radeon/rs400.c 	tmp |= REG_SET(RS690_MC_AGP_START, rdev->mc.gtt_start >> 16);
mc                193 drivers/gpu/drm/radeon/rs400.c 		 (unsigned)(rdev->mc.gtt_size >> 20),
mc                273 drivers/gpu/drm/radeon/rs400.c 	rdev->mc.igp_sideport_enabled = radeon_combios_sideport_present(rdev);
mc                275 drivers/gpu/drm/radeon/rs400.c 	rdev->mc.vram_is_ddr = true;
mc                276 drivers/gpu/drm/radeon/rs400.c 	rdev->mc.vram_width = 128;
mc                279 drivers/gpu/drm/radeon/rs400.c 	radeon_vram_location(rdev, &rdev->mc, base);
mc                280 drivers/gpu/drm/radeon/rs400.c 	rdev->mc.gtt_base_align = rdev->mc.gtt_size - 1;
mc                281 drivers/gpu/drm/radeon/rs400.c 	radeon_gtt_location(rdev, &rdev->mc);
mc                404 drivers/gpu/drm/radeon/rs400.c 		S_000148_MC_FB_START(rdev->mc.vram_start >> 16) |
mc                405 drivers/gpu/drm/radeon/rs400.c 		S_000148_MC_FB_TOP(rdev->mc.vram_end >> 16));
mc                599 drivers/gpu/drm/radeon/rs600.c 	WREG32_MC(R_00013C_MC_PT0_CONTEXT0_FLAT_START_ADDR, rdev->mc.gtt_start);
mc                600 drivers/gpu/drm/radeon/rs600.c 	WREG32_MC(R_00014C_MC_PT0_CONTEXT0_FLAT_END_ADDR, rdev->mc.gtt_end);
mc                604 drivers/gpu/drm/radeon/rs600.c 	WREG32_MC(R_000112_MC_PT0_SYSTEM_APERTURE_LOW_ADDR, rdev->mc.vram_start);
mc                605 drivers/gpu/drm/radeon/rs600.c 	WREG32_MC(R_000114_MC_PT0_SYSTEM_APERTURE_HIGH_ADDR, rdev->mc.vram_end);
mc                614 drivers/gpu/drm/radeon/rs600.c 		 (unsigned)(rdev->mc.gtt_size >> 20),
mc                877 drivers/gpu/drm/radeon/rs600.c 	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
mc                878 drivers/gpu/drm/radeon/rs600.c 	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
mc                879 drivers/gpu/drm/radeon/rs600.c 	rdev->mc.vram_is_ddr = true;
mc                880 drivers/gpu/drm/radeon/rs600.c 	rdev->mc.vram_width = 128;
mc                881 drivers/gpu/drm/radeon/rs600.c 	rdev->mc.real_vram_size = RREG32(RADEON_CONFIG_MEMSIZE);
mc                882 drivers/gpu/drm/radeon/rs600.c 	rdev->mc.mc_vram_size = rdev->mc.real_vram_size;
mc                883 drivers/gpu/drm/radeon/rs600.c 	rdev->mc.visible_vram_size = rdev->mc.aper_size;
mc                884 drivers/gpu/drm/radeon/rs600.c 	rdev->mc.igp_sideport_enabled = radeon_atombios_sideport_present(rdev);
mc                887 drivers/gpu/drm/radeon/rs600.c 	radeon_vram_location(rdev, &rdev->mc, base);
mc                888 drivers/gpu/drm/radeon/rs600.c 	rdev->mc.gtt_base_align = 0;
mc                889 drivers/gpu/drm/radeon/rs600.c 	radeon_gtt_location(rdev, &rdev->mc);
mc                977 drivers/gpu/drm/radeon/rs600.c 			S_000004_MC_FB_START(rdev->mc.vram_start >> 16) |
mc                978 drivers/gpu/drm/radeon/rs600.c 			S_000004_MC_FB_TOP(rdev->mc.vram_end >> 16));
mc                980 drivers/gpu/drm/radeon/rs600.c 		S_000134_HDP_FB_START(rdev->mc.vram_start >> 16));
mc                157 drivers/gpu/drm/radeon/rs690.c 	rdev->mc.vram_is_ddr = true;
mc                158 drivers/gpu/drm/radeon/rs690.c 	rdev->mc.vram_width = 128;
mc                159 drivers/gpu/drm/radeon/rs690.c 	rdev->mc.real_vram_size = RREG32(RADEON_CONFIG_MEMSIZE);
mc                160 drivers/gpu/drm/radeon/rs690.c 	rdev->mc.mc_vram_size = rdev->mc.real_vram_size;
mc                161 drivers/gpu/drm/radeon/rs690.c 	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
mc                162 drivers/gpu/drm/radeon/rs690.c 	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
mc                163 drivers/gpu/drm/radeon/rs690.c 	rdev->mc.visible_vram_size = rdev->mc.aper_size;
mc                166 drivers/gpu/drm/radeon/rs690.c 	rdev->mc.igp_sideport_enabled = radeon_atombios_sideport_present(rdev);
mc                171 drivers/gpu/drm/radeon/rs690.c 	if (rdev->mc.igp_sideport_enabled &&
mc                172 drivers/gpu/drm/radeon/rs690.c 	    (rdev->mc.real_vram_size == (384 * 1024 * 1024))) {
mc                174 drivers/gpu/drm/radeon/rs690.c 		rdev->mc.real_vram_size -= 128 * 1024 * 1024;
mc                175 drivers/gpu/drm/radeon/rs690.c 		rdev->mc.mc_vram_size = rdev->mc.real_vram_size;
mc                184 drivers/gpu/drm/radeon/rs690.c 	if (k8_addr + rdev->mc.visible_vram_size < 0x100000000ULL)	
mc                190 drivers/gpu/drm/radeon/rs690.c 		if (rdev->mc.igp_sideport_enabled == false && radeon_fastfb == 1) {
mc                192 drivers/gpu/drm/radeon/rs690.c 					(unsigned long long)rdev->mc.aper_base, k8_addr);
mc                193 drivers/gpu/drm/radeon/rs690.c 			rdev->mc.aper_base = (resource_size_t)k8_addr;
mc                199 drivers/gpu/drm/radeon/rs690.c 	radeon_vram_location(rdev, &rdev->mc, base);
mc                200 drivers/gpu/drm/radeon/rs690.c 	rdev->mc.gtt_base_align = rdev->mc.gtt_size - 1;
mc                201 drivers/gpu/drm/radeon/rs690.c 	radeon_gtt_location(rdev, &rdev->mc);
mc                366 drivers/gpu/drm/radeon/rs690.c 	if (rdev->mc.igp_sideport_enabled) {
mc                688 drivers/gpu/drm/radeon/rs690.c 			S_000100_MC_FB_START(rdev->mc.vram_start >> 16) |
mc                689 drivers/gpu/drm/radeon/rs690.c 			S_000100_MC_FB_TOP(rdev->mc.vram_end >> 16));
mc                691 drivers/gpu/drm/radeon/rs690.c 		S_000134_HDP_FB_START(rdev->mc.vram_start >> 16));
mc                184 drivers/gpu/drm/radeon/rv515.c 	rdev->mc.vram_width = 128;
mc                185 drivers/gpu/drm/radeon/rv515.c 	rdev->mc.vram_is_ddr = true;
mc                189 drivers/gpu/drm/radeon/rv515.c 		rdev->mc.vram_width = 64;
mc                192 drivers/gpu/drm/radeon/rv515.c 		rdev->mc.vram_width = 128;
mc                195 drivers/gpu/drm/radeon/rv515.c 		rdev->mc.vram_width = 128;
mc                205 drivers/gpu/drm/radeon/rv515.c 	radeon_vram_location(rdev, &rdev->mc, 0);
mc                206 drivers/gpu/drm/radeon/rv515.c 	rdev->mc.gtt_base_align = 0;
mc                208 drivers/gpu/drm/radeon/rv515.c 		radeon_gtt_location(rdev, &rdev->mc);
mc                390 drivers/gpu/drm/radeon/rv515.c 				       upper_32_bits(rdev->mc.vram_start));
mc                392 drivers/gpu/drm/radeon/rv515.c 				       upper_32_bits(rdev->mc.vram_start));
mc                395 drivers/gpu/drm/radeon/rv515.c 				       upper_32_bits(rdev->mc.vram_start));
mc                397 drivers/gpu/drm/radeon/rv515.c 				       upper_32_bits(rdev->mc.vram_start));
mc                401 drivers/gpu/drm/radeon/rv515.c 		       (u32)rdev->mc.vram_start);
mc                403 drivers/gpu/drm/radeon/rv515.c 		       (u32)rdev->mc.vram_start);
mc                405 drivers/gpu/drm/radeon/rv515.c 	WREG32(R_000310_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
mc                481 drivers/gpu/drm/radeon/rv515.c 	WREG32(R_0000F8_CONFIG_MEMSIZE, rdev->mc.real_vram_size);
mc                484 drivers/gpu/drm/radeon/rv515.c 			S_000001_MC_FB_START(rdev->mc.vram_start >> 16) |
mc                485 drivers/gpu/drm/radeon/rv515.c 			S_000001_MC_FB_TOP(rdev->mc.vram_end >> 16));
mc                487 drivers/gpu/drm/radeon/rv515.c 		S_000134_HDP_FB_START(rdev->mc.vram_start >> 16));
mc                490 drivers/gpu/drm/radeon/rv515.c 			S_000002_MC_AGP_START(rdev->mc.gtt_start >> 16) |
mc                491 drivers/gpu/drm/radeon/rv515.c 			S_000002_MC_AGP_TOP(rdev->mc.gtt_end >> 16));
mc                492 drivers/gpu/drm/radeon/rv515.c 		WREG32_MC(R_000003_MC_AGP_BASE, lower_32_bits(rdev->mc.agp_base));
mc                494 drivers/gpu/drm/radeon/rv515.c 			S_000004_AGP_BASE_ADDR_2(upper_32_bits(rdev->mc.agp_base)));
mc                929 drivers/gpu/drm/radeon/rv770.c 	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
mc                930 drivers/gpu/drm/radeon/rv770.c 	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
mc                941 drivers/gpu/drm/radeon/rv770.c 		 (unsigned)(rdev->mc.gtt_size >> 20),
mc               1035 drivers/gpu/drm/radeon/rv770.c 		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
mc               1038 drivers/gpu/drm/radeon/rv770.c 				rdev->mc.vram_start >> 12);
mc               1040 drivers/gpu/drm/radeon/rv770.c 				rdev->mc.gtt_end >> 12);
mc               1044 drivers/gpu/drm/radeon/rv770.c 				rdev->mc.gtt_start >> 12);
mc               1046 drivers/gpu/drm/radeon/rv770.c 				rdev->mc.vram_end >> 12);
mc               1050 drivers/gpu/drm/radeon/rv770.c 			rdev->mc.vram_start >> 12);
mc               1052 drivers/gpu/drm/radeon/rv770.c 			rdev->mc.vram_end >> 12);
mc               1055 drivers/gpu/drm/radeon/rv770.c 	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
mc               1056 drivers/gpu/drm/radeon/rv770.c 	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
mc               1058 drivers/gpu/drm/radeon/rv770.c 	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
mc               1062 drivers/gpu/drm/radeon/rv770.c 		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
mc               1063 drivers/gpu/drm/radeon/rv770.c 		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
mc               1064 drivers/gpu/drm/radeon/rv770.c 		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
mc               1086 drivers/gpu/drm/radeon/rv770.c 		radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
mc               1606 drivers/gpu/drm/radeon/rv770.c void r700_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc)
mc               1610 drivers/gpu/drm/radeon/rv770.c 	if (mc->mc_vram_size > 0xE0000000) {
mc               1613 drivers/gpu/drm/radeon/rv770.c 		mc->real_vram_size = 0xE0000000;
mc               1614 drivers/gpu/drm/radeon/rv770.c 		mc->mc_vram_size = 0xE0000000;
mc               1617 drivers/gpu/drm/radeon/rv770.c 		size_bf = mc->gtt_start;
mc               1618 drivers/gpu/drm/radeon/rv770.c 		size_af = mc->mc_mask - mc->gtt_end;
mc               1620 drivers/gpu/drm/radeon/rv770.c 			if (mc->mc_vram_size > size_bf) {
mc               1622 drivers/gpu/drm/radeon/rv770.c 				mc->real_vram_size = size_bf;
mc               1623 drivers/gpu/drm/radeon/rv770.c 				mc->mc_vram_size = size_bf;
mc               1625 drivers/gpu/drm/radeon/rv770.c 			mc->vram_start = mc->gtt_start - mc->mc_vram_size;
mc               1627 drivers/gpu/drm/radeon/rv770.c 			if (mc->mc_vram_size > size_af) {
mc               1629 drivers/gpu/drm/radeon/rv770.c 				mc->real_vram_size = size_af;
mc               1630 drivers/gpu/drm/radeon/rv770.c 				mc->mc_vram_size = size_af;
mc               1632 drivers/gpu/drm/radeon/rv770.c 			mc->vram_start = mc->gtt_end + 1;
mc               1634 drivers/gpu/drm/radeon/rv770.c 		mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;
mc               1636 drivers/gpu/drm/radeon/rv770.c 				mc->mc_vram_size >> 20, mc->vram_start,
mc               1637 drivers/gpu/drm/radeon/rv770.c 				mc->vram_end, mc->real_vram_size >> 20);
mc               1639 drivers/gpu/drm/radeon/rv770.c 		radeon_vram_location(rdev, &rdev->mc, 0);
mc               1640 drivers/gpu/drm/radeon/rv770.c 		rdev->mc.gtt_base_align = 0;
mc               1641 drivers/gpu/drm/radeon/rv770.c 		radeon_gtt_location(rdev, mc);
mc               1651 drivers/gpu/drm/radeon/rv770.c 	rdev->mc.vram_is_ddr = true;
mc               1676 drivers/gpu/drm/radeon/rv770.c 	rdev->mc.vram_width = numchan * chansize;
mc               1678 drivers/gpu/drm/radeon/rv770.c 	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
mc               1679 drivers/gpu/drm/radeon/rv770.c 	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
mc               1681 drivers/gpu/drm/radeon/rv770.c 	rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
mc               1682 drivers/gpu/drm/radeon/rv770.c 	rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
mc               1683 drivers/gpu/drm/radeon/rv770.c 	rdev->mc.visible_vram_size = rdev->mc.aper_size;
mc               1684 drivers/gpu/drm/radeon/rv770.c 	r700_vram_gtt_location(rdev, &rdev->mc);
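The r700_vram_gtt_location() entries above show the MC address-space carve-up: VRAM is slotted into one of the gaps around the GTT aperture, clamped if it does not fit, and the resulting start/end later get packed into a 16-bit-per-field location word (the `>> 24` lines). Below is a minimal standalone C sketch of that arithmetic; the struct, the field names and the prefer-the-larger-gap condition are illustrative assumptions, not the driver's definitions.

/*
 * Hedged model of the VRAM placement decision visible in
 * r700_vram_gtt_location() above.  Plain C, not driver code.
 */
#include <stdint.h>
#include <stdio.h>

struct mc_layout {
	uint64_t mc_mask;		/* highest MC address */
	uint64_t gtt_start, gtt_end;
	uint64_t vram_size;		/* requested; may be clamped */
	uint64_t vram_start, vram_end;
};

static void place_vram(struct mc_layout *mc)
{
	uint64_t size_bf = mc->gtt_start;		/* room before GTT */
	uint64_t size_af = mc->mc_mask - mc->gtt_end;	/* room after GTT  */

	if (size_bf > size_af) {			/* assumed policy */
		if (mc->vram_size > size_bf)
			mc->vram_size = size_bf;
		mc->vram_start = mc->gtt_start - mc->vram_size;
	} else {
		if (mc->vram_size > size_af)
			mc->vram_size = size_af;
		mc->vram_start = mc->gtt_end + 1;
	}
	mc->vram_end = mc->vram_start + mc->vram_size - 1;
}

/* Pack start/end in 16 MiB units the way the listing writes the FB
 * location word: low 16 bits = start >> 24, high 16 bits = end >> 24. */
static uint32_t pack_fb_location(const struct mc_layout *mc)
{
	uint32_t tmp = ((mc->vram_end >> 24) & 0xFFFF) << 16;

	tmp |= (mc->vram_start >> 24) & 0xFFFF;
	return tmp;
}

int main(void)
{
	struct mc_layout mc = {
		.mc_mask   = (1ULL << 40) - 1,
		.gtt_start = 0,
		.gtt_end   = (256ULL << 20) - 1,
		.vram_size = 1ULL << 30,
	};

	place_vram(&mc);
	printf("VRAM 0x%llx-0x%llx, FB_LOCATION 0x%08x\n",
	       (unsigned long long)mc.vram_start,
	       (unsigned long long)mc.vram_end,
	       pack_fb_location(&mc));
	return 0;
}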
mc               3470 drivers/gpu/drm/radeon/si.c 			radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
mc               3769 drivers/gpu/drm/radeon/si.c 		radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);
mc               4166 drivers/gpu/drm/radeon/si.c 	       rdev->mc.vram_start >> 12);
mc               4168 drivers/gpu/drm/radeon/si.c 	       rdev->mc.vram_end >> 12);
mc               4171 drivers/gpu/drm/radeon/si.c 	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
mc               4172 drivers/gpu/drm/radeon/si.c 	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
mc               4175 drivers/gpu/drm/radeon/si.c 	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
mc               4193 drivers/gpu/drm/radeon/si.c 			  struct radeon_mc *mc)
mc               4195 drivers/gpu/drm/radeon/si.c 	if (mc->mc_vram_size > 0xFFC0000000ULL) {
mc               4198 drivers/gpu/drm/radeon/si.c 		mc->real_vram_size = 0xFFC0000000ULL;
mc               4199 drivers/gpu/drm/radeon/si.c 		mc->mc_vram_size = 0xFFC0000000ULL;
mc               4201 drivers/gpu/drm/radeon/si.c 	radeon_vram_location(rdev, &rdev->mc, 0);
mc               4202 drivers/gpu/drm/radeon/si.c 	rdev->mc.gtt_base_align = 0;
mc               4203 drivers/gpu/drm/radeon/si.c 	radeon_gtt_location(rdev, mc);
mc               4212 drivers/gpu/drm/radeon/si.c 	rdev->mc.vram_is_ddr = true;
mc               4252 drivers/gpu/drm/radeon/si.c 	rdev->mc.vram_width = numchan * chansize;
mc               4254 drivers/gpu/drm/radeon/si.c 	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
mc               4255 drivers/gpu/drm/radeon/si.c 	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
mc               4264 drivers/gpu/drm/radeon/si.c 	rdev->mc.mc_vram_size = tmp * 1024ULL * 1024ULL;
mc               4265 drivers/gpu/drm/radeon/si.c 	rdev->mc.real_vram_size = rdev->mc.mc_vram_size;
mc               4266 drivers/gpu/drm/radeon/si.c 	rdev->mc.visible_vram_size = rdev->mc.aper_size;
mc               4267 drivers/gpu/drm/radeon/si.c 	si_vram_gtt_location(rdev, &rdev->mc);
mc               4316 drivers/gpu/drm/radeon/si.c 	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
mc               4317 drivers/gpu/drm/radeon/si.c 	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
mc               4367 drivers/gpu/drm/radeon/si.c 		 (unsigned)(rdev->mc.gtt_size >> 20),
mc                390 drivers/infiniband/core/cma.c 	struct cma_multicast	*mc;
mc                521 drivers/infiniband/core/cma.c 	struct cma_multicast *mc = container_of(kref, struct cma_multicast, mcref);
mc                523 drivers/infiniband/core/cma.c 	kfree(mc->multicast.ib);
mc                524 drivers/infiniband/core/cma.c 	kfree(mc);
mc               1803 drivers/infiniband/core/cma.c 				    struct cma_multicast *mc)
mc               1811 drivers/infiniband/core/cma.c 		cma_igmp_send(ndev, &mc->multicast.ib->rec.mgid, false);
mc               1814 drivers/infiniband/core/cma.c 	kref_put(&mc->mcref, release_mc);
mc               1819 drivers/infiniband/core/cma.c 	struct cma_multicast *mc;
mc               1822 drivers/infiniband/core/cma.c 		mc = container_of(id_priv->mc_list.next,
mc               1824 drivers/infiniband/core/cma.c 		list_del(&mc->list);
mc               1827 drivers/infiniband/core/cma.c 			ib_sa_free_multicast(mc->multicast.ib);
mc               1828 drivers/infiniband/core/cma.c 			kfree(mc);
mc               1830 drivers/infiniband/core/cma.c 			cma_leave_roce_mc_group(id_priv, mc);
mc               4152 drivers/infiniband/core/cma.c 	struct cma_multicast *mc = multicast->context;
mc               4156 drivers/infiniband/core/cma.c 	id_priv = mc->id_priv;
mc               4178 drivers/infiniband/core/cma.c 	event.param.ud.private_data = mc->context;
mc               4250 drivers/infiniband/core/cma.c 				 struct cma_multicast *mc)
mc               4267 drivers/infiniband/core/cma.c 	cma_set_mgid(id_priv, (struct sockaddr *) &mc->addr, &rec.mgid);
mc               4271 drivers/infiniband/core/cma.c 	rec.join_state = mc->join_state;
mc               4297 drivers/infiniband/core/cma.c 	mc->multicast.ib = ib_sa_join_multicast(&sa_client, id_priv->id.device,
mc               4300 drivers/infiniband/core/cma.c 						cma_ib_mc_handler, mc);
mc               4301 drivers/infiniband/core/cma.c 	return PTR_ERR_OR_ZERO(mc->multicast.ib);
mc               4307 drivers/infiniband/core/cma.c 	struct cma_multicast *mc = mw->mc;
mc               4308 drivers/infiniband/core/cma.c 	struct ib_sa_multicast *m = mc->multicast.ib;
mc               4310 drivers/infiniband/core/cma.c 	mc->multicast.ib->context = mc;
mc               4312 drivers/infiniband/core/cma.c 	kref_put(&mc->mcref, release_mc);
mc               4346 drivers/infiniband/core/cma.c 				   struct cma_multicast *mc)
mc               4351 drivers/infiniband/core/cma.c 	struct sockaddr *addr = (struct sockaddr *)&mc->addr;
mc               4356 drivers/infiniband/core/cma.c 	send_only = mc->join_state == BIT(SENDONLY_FULLMEMBER_JOIN);
mc               4358 drivers/infiniband/core/cma.c 	if (cma_zero_addr((struct sockaddr *)&mc->addr))
mc               4365 drivers/infiniband/core/cma.c 	mc->multicast.ib = kzalloc(sizeof(struct ib_sa_multicast), GFP_KERNEL);
mc               4366 drivers/infiniband/core/cma.c 	if (!mc->multicast.ib) {
mc               4373 drivers/infiniband/core/cma.c 	cma_iboe_set_mgid(addr, &mc->multicast.ib->rec.mgid, gid_type);
mc               4375 drivers/infiniband/core/cma.c 	mc->multicast.ib->rec.pkey = cpu_to_be16(0xffff);
mc               4377 drivers/infiniband/core/cma.c 		mc->multicast.ib->rec.qkey = cpu_to_be32(RDMA_UDP_QKEY);
mc               4385 drivers/infiniband/core/cma.c 	mc->multicast.ib->rec.rate = iboe_get_rate(ndev);
mc               4386 drivers/infiniband/core/cma.c 	mc->multicast.ib->rec.hop_limit = 1;
mc               4387 drivers/infiniband/core/cma.c 	mc->multicast.ib->rec.mtu = iboe_get_mtu(ndev->mtu);
mc               4391 drivers/infiniband/core/cma.c 			mc->multicast.ib->rec.hop_limit = IPV6_DEFAULT_HOPLIMIT;
mc               4393 drivers/infiniband/core/cma.c 				err = cma_igmp_send(ndev, &mc->multicast.ib->rec.mgid,
mc               4402 drivers/infiniband/core/cma.c 	if (err || !mc->multicast.ib->rec.mtu) {
mc               4408 drivers/infiniband/core/cma.c 		    &mc->multicast.ib->rec.port_gid);
mc               4410 drivers/infiniband/core/cma.c 	work->mc = mc;
mc               4412 drivers/infiniband/core/cma.c 	kref_get(&mc->mcref);
mc               4418 drivers/infiniband/core/cma.c 	kfree(mc->multicast.ib);
mc               4428 drivers/infiniband/core/cma.c 	struct cma_multicast *mc;
mc               4439 drivers/infiniband/core/cma.c 	mc = kmalloc(sizeof *mc, GFP_KERNEL);
mc               4440 drivers/infiniband/core/cma.c 	if (!mc)
mc               4443 drivers/infiniband/core/cma.c 	memcpy(&mc->addr, addr, rdma_addr_size(addr));
mc               4444 drivers/infiniband/core/cma.c 	mc->context = context;
mc               4445 drivers/infiniband/core/cma.c 	mc->id_priv = id_priv;
mc               4446 drivers/infiniband/core/cma.c 	mc->join_state = join_state;
mc               4449 drivers/infiniband/core/cma.c 		kref_init(&mc->mcref);
mc               4450 drivers/infiniband/core/cma.c 		ret = cma_iboe_join_multicast(id_priv, mc);
mc               4454 drivers/infiniband/core/cma.c 		ret = cma_join_ib_multicast(id_priv, mc);
mc               4463 drivers/infiniband/core/cma.c 	list_add(&mc->list, &id_priv->mc_list);
mc               4468 drivers/infiniband/core/cma.c 	kfree(mc);
mc               4476 drivers/infiniband/core/cma.c 	struct cma_multicast *mc;
mc               4480 drivers/infiniband/core/cma.c 	list_for_each_entry(mc, &id_priv->mc_list, list) {
mc               4481 drivers/infiniband/core/cma.c 		if (!memcmp(&mc->addr, addr, rdma_addr_size(addr))) {
mc               4482 drivers/infiniband/core/cma.c 			list_del(&mc->list);
mc               4487 drivers/infiniband/core/cma.c 						&mc->multicast.ib->rec.mgid,
mc               4488 drivers/infiniband/core/cma.c 						be16_to_cpu(mc->multicast.ib->rec.mlid));
mc               4493 drivers/infiniband/core/cma.c 				ib_sa_free_multicast(mc->multicast.ib);
mc               4494 drivers/infiniband/core/cma.c 				kfree(mc);
mc               4496 drivers/infiniband/core/cma.c 				cma_leave_roce_mc_group(id_priv, mc);
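The cma.c entries above use a kref on struct cma_multicast: the join path initialises it, the queued work takes an extra reference, and kref_put(..., release_mc) frees the object via container_of() on the last drop. Below is a hedged userspace model of that lifecycle; the miniature non-atomic kref, the placeholder `rec` member and the call-site comments are stand-ins for illustration, not the kernel API.

/*
 * Userspace model of the release_mc()/kref pattern in the listing.
 * The "kref" here is a plain counter, not the kernel's atomic one.
 */
#include <stddef.h>
#include <stdlib.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct kref { int refcount; };

static void kref_init(struct kref *k) { k->refcount = 1; }
static void kref_get(struct kref *k)  { k->refcount++; }

static void kref_put(struct kref *k, void (*release)(struct kref *))
{
	if (--k->refcount == 0)
		release(k);
}

struct cma_multicast {
	void *rec;		/* placeholder for multicast.ib */
	struct kref mcref;
};

static void release_mc(struct kref *kref)
{
	struct cma_multicast *mc =
		container_of(kref, struct cma_multicast, mcref);

	free(mc->rec);
	free(mc);
	puts("released");
}

int main(void)
{
	struct cma_multicast *mc = calloc(1, sizeof(*mc));

	kref_init(&mc->mcref);			/* join path: first ref      */
	kref_get(&mc->mcref);			/* queued work: extra ref    */
	kref_put(&mc->mcref, release_mc);	/* work completion drops one */
	kref_put(&mc->mcref, release_mc);	/* leave path: last drop frees */
	return 0;
}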
mc                121 drivers/infiniband/core/ucma.c 	struct ucma_multicast	*mc;
mc                235 drivers/infiniband/core/ucma.c 	struct ucma_multicast *mc;
mc                237 drivers/infiniband/core/ucma.c 	mc = kzalloc(sizeof(*mc), GFP_KERNEL);
mc                238 drivers/infiniband/core/ucma.c 	if (!mc)
mc                241 drivers/infiniband/core/ucma.c 	mc->ctx = ctx;
mc                242 drivers/infiniband/core/ucma.c 	if (xa_alloc(&multicast_table, &mc->id, NULL, xa_limit_32b, GFP_KERNEL))
mc                245 drivers/infiniband/core/ucma.c 	list_add_tail(&mc->list, &ctx->mc_list);
mc                246 drivers/infiniband/core/ucma.c 	return mc;
mc                249 drivers/infiniband/core/ucma.c 	kfree(mc);
mc                290 drivers/infiniband/core/ucma.c 		uevent->mc = (struct ucma_multicast *)
mc                292 drivers/infiniband/core/ucma.c 		uevent->resp.uid = uevent->mc->uid;
mc                293 drivers/infiniband/core/ucma.c 		uevent->resp.id = uevent->mc->id;
mc                450 drivers/infiniband/core/ucma.c 	if (uevent->mc)
mc                451 drivers/infiniband/core/ucma.c 		uevent->mc->events_reported++;
mc                533 drivers/infiniband/core/ucma.c 	struct ucma_multicast *mc, *tmp;
mc                536 drivers/infiniband/core/ucma.c 	list_for_each_entry_safe(mc, tmp, &ctx->mc_list, list) {
mc                537 drivers/infiniband/core/ucma.c 		list_del(&mc->list);
mc                538 drivers/infiniband/core/ucma.c 		xa_erase(&multicast_table, mc->id);
mc                539 drivers/infiniband/core/ucma.c 		kfree(mc);
mc                544 drivers/infiniband/core/ucma.c static void ucma_cleanup_mc_events(struct ucma_multicast *mc)
mc                548 drivers/infiniband/core/ucma.c 	list_for_each_entry_safe(uevent, tmp, &mc->ctx->file->event_list, list) {
mc                549 drivers/infiniband/core/ucma.c 		if (uevent->mc != mc)
mc               1423 drivers/infiniband/core/ucma.c 	struct ucma_multicast *mc;
mc               1447 drivers/infiniband/core/ucma.c 	mc = ucma_alloc_multicast(ctx);
mc               1448 drivers/infiniband/core/ucma.c 	if (!mc) {
mc               1452 drivers/infiniband/core/ucma.c 	mc->join_state = join_state;
mc               1453 drivers/infiniband/core/ucma.c 	mc->uid = cmd->uid;
mc               1454 drivers/infiniband/core/ucma.c 	memcpy(&mc->addr, addr, cmd->addr_size);
mc               1456 drivers/infiniband/core/ucma.c 	ret = rdma_join_multicast(ctx->cm_id, (struct sockaddr *)&mc->addr,
mc               1457 drivers/infiniband/core/ucma.c 				  join_state, mc);
mc               1462 drivers/infiniband/core/ucma.c 	resp.id = mc->id;
mc               1469 drivers/infiniband/core/ucma.c 	xa_store(&multicast_table, mc->id, mc, 0);
mc               1476 drivers/infiniband/core/ucma.c 	rdma_leave_multicast(ctx->cm_id, (struct sockaddr *) &mc->addr);
mc               1477 drivers/infiniband/core/ucma.c 	ucma_cleanup_mc_events(mc);
mc               1479 drivers/infiniband/core/ucma.c 	xa_erase(&multicast_table, mc->id);
mc               1480 drivers/infiniband/core/ucma.c 	list_del(&mc->list);
mc               1481 drivers/infiniband/core/ucma.c 	kfree(mc);
mc               1532 drivers/infiniband/core/ucma.c 	struct ucma_multicast *mc;
mc               1542 drivers/infiniband/core/ucma.c 	mc = xa_load(&multicast_table, cmd.id);
mc               1543 drivers/infiniband/core/ucma.c 	if (!mc)
mc               1544 drivers/infiniband/core/ucma.c 		mc = ERR_PTR(-ENOENT);
mc               1545 drivers/infiniband/core/ucma.c 	else if (mc->ctx->file != file)
mc               1546 drivers/infiniband/core/ucma.c 		mc = ERR_PTR(-EINVAL);
mc               1547 drivers/infiniband/core/ucma.c 	else if (!atomic_inc_not_zero(&mc->ctx->ref))
mc               1548 drivers/infiniband/core/ucma.c 		mc = ERR_PTR(-ENXIO);
mc               1550 drivers/infiniband/core/ucma.c 		__xa_erase(&multicast_table, mc->id);
mc               1553 drivers/infiniband/core/ucma.c 	if (IS_ERR(mc)) {
mc               1554 drivers/infiniband/core/ucma.c 		ret = PTR_ERR(mc);
mc               1558 drivers/infiniband/core/ucma.c 	mutex_lock(&mc->ctx->mutex);
mc               1559 drivers/infiniband/core/ucma.c 	rdma_leave_multicast(mc->ctx->cm_id, (struct sockaddr *) &mc->addr);
mc               1560 drivers/infiniband/core/ucma.c 	mutex_unlock(&mc->ctx->mutex);
mc               1562 drivers/infiniband/core/ucma.c 	mutex_lock(&mc->ctx->file->mut);
mc               1563 drivers/infiniband/core/ucma.c 	ucma_cleanup_mc_events(mc);
mc               1564 drivers/infiniband/core/ucma.c 	list_del(&mc->list);
mc               1565 drivers/infiniband/core/ucma.c 	mutex_unlock(&mc->ctx->file->mut);
mc               1567 drivers/infiniband/core/ucma.c 	ucma_put_ctx(mc->ctx);
mc               1568 drivers/infiniband/core/ucma.c 	resp.events_reported = mc->events_reported;
mc               1569 drivers/infiniband/core/ucma.c 	kfree(mc);
mc                163 drivers/infiniband/ulp/ipoib/ipoib.h 	struct ib_sa_multicast	 *mc;
mc                459 drivers/infiniband/ulp/ipoib/ipoib_multicast.c 		mcast->mc = NULL;
mc                461 drivers/infiniband/ulp/ipoib/ipoib_multicast.c 		mcast->mc = multicast;
mc                623 drivers/infiniband/ulp/ipoib/ipoib_multicast.c 		if (IS_ERR_OR_NULL(priv->broadcast->mc) &&
mc                640 drivers/infiniband/ulp/ipoib/ipoib_multicast.c 		if (IS_ERR_OR_NULL(mcast->mc) &&
mc                704 drivers/infiniband/ulp/ipoib/ipoib_multicast.c 	if (!IS_ERR_OR_NULL(mcast->mc))
mc                705 drivers/infiniband/ulp/ipoib/ipoib_multicast.c 		ib_sa_free_multicast(mcast->mc);
mc                244 drivers/infiniband/ulp/opa_vnic/opa_vnic_netdev.c 		hw_list = &netdev->mc;
mc                331 drivers/iommu/tegra-gart.c struct gart_device *tegra_gart_probe(struct device *dev, struct tegra_mc *mc)
mc                353 drivers/iommu/tegra-gart.c 	gart->regs = mc->regs + GART_REG_BASE;
mc                 30 drivers/iommu/tegra-smmu.c 	struct tegra_mc *mc;
mc                177 drivers/iommu/tegra-smmu.c 	offset &= ~(smmu->mc->soc->atom_size - 1);
mc                179 drivers/iommu/tegra-smmu.c 	if (smmu->mc->soc->num_address_bits > 32) {
mc                720 drivers/iommu/tegra-smmu.c 	struct tegra_mc *mc;
mc                726 drivers/iommu/tegra-smmu.c 	mc = platform_get_drvdata(pdev);
mc                727 drivers/iommu/tegra-smmu.c 	if (!mc)
mc                730 drivers/iommu/tegra-smmu.c 	return mc->smmu;
mc                997 drivers/iommu/tegra-smmu.c 				    struct tegra_mc *mc)
mc               1016 drivers/iommu/tegra-smmu.c 	mc->smmu = smmu;
mc               1027 drivers/iommu/tegra-smmu.c 	smmu->regs = mc->regs;
mc               1030 drivers/iommu/tegra-smmu.c 	smmu->mc = mc;
mc               1032 drivers/iommu/tegra-smmu.c 	smmu->pfn_mask = BIT_MASK(mc->soc->num_address_bits - PAGE_SHIFT) - 1;
mc               1034 drivers/iommu/tegra-smmu.c 		mc->soc->num_address_bits, smmu->pfn_mask);
mc                 64 drivers/mailbox/mailbox-xgene-slimpro.c 	struct slimpro_mbox_chan	mc[MBOX_CNT];
mc                191 drivers/mailbox/mailbox-xgene-slimpro.c 		ctx->mc[i].irq = platform_get_irq(pdev, i);
mc                192 drivers/mailbox/mailbox-xgene-slimpro.c 		if (ctx->mc[i].irq < 0) {
mc                201 drivers/mailbox/mailbox-xgene-slimpro.c 		ctx->mc[i].dev = &pdev->dev;
mc                202 drivers/mailbox/mailbox-xgene-slimpro.c 		ctx->mc[i].reg = mb_base + i * MBOX_REG_SET_OFFSET;
mc                203 drivers/mailbox/mailbox-xgene-slimpro.c 		ctx->mc[i].chan = &ctx->chans[i];
mc                204 drivers/mailbox/mailbox-xgene-slimpro.c 		ctx->chans[i].con_priv = &ctx->mc[i];
mc               1558 drivers/media/dvb-frontends/stv0910.c 		u32 mc;
mc               1580 drivers/media/dvb-frontends/stv0910.c 		mc = ((tmp & 0x7c) >> 2);
mc               1582 drivers/media/dvb-frontends/stv0910.c 		p->modulation = modcod2mod[mc];
mc               1583 drivers/media/dvb-frontends/stv0910.c 		p->fec_inner = modcod2fec[mc];
mc                693 drivers/media/tuners/mt2063.c 	u32 ma, mb, mc, md, me, mf;
mc                731 drivers/media/tuners/mt2063.c 		mc = (n * ((f_LO1 + hgcs) / gc_Scale) -
mc                733 drivers/media/tuners/mt2063.c 		if (mc != md) {
mc                735 drivers/media/tuners/mt2063.c 			f_nsLO2 = (s32) (mc * (f_LO2 / gc_Scale));
mc                738 drivers/media/tuners/mt2063.c 			    n * (f_LO1 % gc_Scale) - mc * (f_LO2 % gc_Scale);
mc                740 drivers/media/tuners/mt2063.c 			*fp = ((f_Spur - (s32) c) / (mc - n)) + 1;
mc                741 drivers/media/tuners/mt2063.c 			*fm = (((s32) d - f_Spur) / (mc - n)) + 1;
mc                 77 drivers/memory/tegra/mc.c static int tegra_mc_block_dma_common(struct tegra_mc *mc,
mc                 83 drivers/memory/tegra/mc.c 	spin_lock_irqsave(&mc->lock, flags);
mc                 85 drivers/memory/tegra/mc.c 	value = mc_readl(mc, rst->control) | BIT(rst->bit);
mc                 86 drivers/memory/tegra/mc.c 	mc_writel(mc, value, rst->control);
mc                 88 drivers/memory/tegra/mc.c 	spin_unlock_irqrestore(&mc->lock, flags);
mc                 93 drivers/memory/tegra/mc.c static bool tegra_mc_dma_idling_common(struct tegra_mc *mc,
mc                 96 drivers/memory/tegra/mc.c 	return (mc_readl(mc, rst->status) & BIT(rst->bit)) != 0;
mc                 99 drivers/memory/tegra/mc.c static int tegra_mc_unblock_dma_common(struct tegra_mc *mc,
mc                105 drivers/memory/tegra/mc.c 	spin_lock_irqsave(&mc->lock, flags);
mc                107 drivers/memory/tegra/mc.c 	value = mc_readl(mc, rst->control) & ~BIT(rst->bit);
mc                108 drivers/memory/tegra/mc.c 	mc_writel(mc, value, rst->control);
mc                110 drivers/memory/tegra/mc.c 	spin_unlock_irqrestore(&mc->lock, flags);
mc                115 drivers/memory/tegra/mc.c static int tegra_mc_reset_status_common(struct tegra_mc *mc,
mc                118 drivers/memory/tegra/mc.c 	return (mc_readl(mc, rst->control) & BIT(rst->bit)) != 0;
mc                133 drivers/memory/tegra/mc.c static const struct tegra_mc_reset *tegra_mc_reset_find(struct tegra_mc *mc,
mc                138 drivers/memory/tegra/mc.c 	for (i = 0; i < mc->soc->num_resets; i++)
mc                139 drivers/memory/tegra/mc.c 		if (mc->soc->resets[i].id == id)
mc                140 drivers/memory/tegra/mc.c 			return &mc->soc->resets[i];
mc                148 drivers/memory/tegra/mc.c 	struct tegra_mc *mc = reset_to_mc(rcdev);
mc                154 drivers/memory/tegra/mc.c 	rst = tegra_mc_reset_find(mc, id);
mc                158 drivers/memory/tegra/mc.c 	rst_ops = mc->soc->reset_ops;
mc                164 drivers/memory/tegra/mc.c 		err = rst_ops->block_dma(mc, rst);
mc                166 drivers/memory/tegra/mc.c 			dev_err(mc->dev, "failed to block %s DMA: %d\n",
mc                174 drivers/memory/tegra/mc.c 		while (!rst_ops->dma_idling(mc, rst)) {
mc                176 drivers/memory/tegra/mc.c 				dev_err(mc->dev, "failed to flush %s DMA\n",
mc                187 drivers/memory/tegra/mc.c 		err = rst_ops->hotreset_assert(mc, rst);
mc                189 drivers/memory/tegra/mc.c 			dev_err(mc->dev, "failed to hot reset %s: %d\n",
mc                201 drivers/memory/tegra/mc.c 	struct tegra_mc *mc = reset_to_mc(rcdev);
mc                206 drivers/memory/tegra/mc.c 	rst = tegra_mc_reset_find(mc, id);
mc                210 drivers/memory/tegra/mc.c 	rst_ops = mc->soc->reset_ops;
mc                216 drivers/memory/tegra/mc.c 		err = rst_ops->hotreset_deassert(mc, rst);
mc                218 drivers/memory/tegra/mc.c 			dev_err(mc->dev, "failed to deassert hot reset %s: %d\n",
mc                226 drivers/memory/tegra/mc.c 		err = rst_ops->unblock_dma(mc, rst);
mc                228 drivers/memory/tegra/mc.c 			dev_err(mc->dev, "failed to unblock %s DMA : %d\n",
mc                240 drivers/memory/tegra/mc.c 	struct tegra_mc *mc = reset_to_mc(rcdev);
mc                244 drivers/memory/tegra/mc.c 	rst = tegra_mc_reset_find(mc, id);
mc                248 drivers/memory/tegra/mc.c 	rst_ops = mc->soc->reset_ops;
mc                252 drivers/memory/tegra/mc.c 	return rst_ops->reset_status(mc, rst);
mc                261 drivers/memory/tegra/mc.c static int tegra_mc_reset_setup(struct tegra_mc *mc)
mc                265 drivers/memory/tegra/mc.c 	mc->reset.ops = &tegra_mc_reset_ops;
mc                266 drivers/memory/tegra/mc.c 	mc->reset.owner = THIS_MODULE;
mc                267 drivers/memory/tegra/mc.c 	mc->reset.of_node = mc->dev->of_node;
mc                268 drivers/memory/tegra/mc.c 	mc->reset.of_reset_n_cells = 1;
mc                269 drivers/memory/tegra/mc.c 	mc->reset.nr_resets = mc->soc->num_resets;
mc                271 drivers/memory/tegra/mc.c 	err = reset_controller_register(&mc->reset);
mc                278 drivers/memory/tegra/mc.c static int tegra_mc_setup_latency_allowance(struct tegra_mc *mc)
mc                285 drivers/memory/tegra/mc.c 	tick = (unsigned long long)mc->tick * clk_get_rate(mc->clk);
mc                288 drivers/memory/tegra/mc.c 	value = mc_readl(mc, MC_EMEM_ARB_CFG);
mc                291 drivers/memory/tegra/mc.c 	mc_writel(mc, value, MC_EMEM_ARB_CFG);
mc                294 drivers/memory/tegra/mc.c 	for (i = 0; i < mc->soc->num_clients; i++) {
mc                295 drivers/memory/tegra/mc.c 		const struct tegra_mc_la *la = &mc->soc->clients[i].la;
mc                298 drivers/memory/tegra/mc.c 		value = mc_readl(mc, la->reg);
mc                301 drivers/memory/tegra/mc.c 		mc_writel(mc, value, la->reg);
mc                305 drivers/memory/tegra/mc.c 	mc_writel(mc, MC_TIMING_UPDATE, MC_TIMING_CONTROL);
mc                310 drivers/memory/tegra/mc.c void tegra_mc_write_emem_configuration(struct tegra_mc *mc, unsigned long rate)
mc                315 drivers/memory/tegra/mc.c 	for (i = 0; i < mc->num_timings; i++) {
mc                316 drivers/memory/tegra/mc.c 		if (mc->timings[i].rate == rate) {
mc                317 drivers/memory/tegra/mc.c 			timing = &mc->timings[i];
mc                323 drivers/memory/tegra/mc.c 		dev_err(mc->dev, "no memory timing registered for rate %lu\n",
mc                328 drivers/memory/tegra/mc.c 	for (i = 0; i < mc->soc->num_emem_regs; ++i)
mc                329 drivers/memory/tegra/mc.c 		mc_writel(mc, timing->emem_data[i], mc->soc->emem_regs[i]);
mc                332 drivers/memory/tegra/mc.c unsigned int tegra_mc_get_emem_device_count(struct tegra_mc *mc)
mc                336 drivers/memory/tegra/mc.c 	dram_count = mc_readl(mc, MC_EMEM_ADR_CFG);
mc                343 drivers/memory/tegra/mc.c static int load_one_timing(struct tegra_mc *mc,
mc                352 drivers/memory/tegra/mc.c 		dev_err(mc->dev,
mc                358 drivers/memory/tegra/mc.c 	timing->emem_data = devm_kcalloc(mc->dev, mc->soc->num_emem_regs,
mc                365 drivers/memory/tegra/mc.c 					 mc->soc->num_emem_regs);
mc                367 drivers/memory/tegra/mc.c 		dev_err(mc->dev,
mc                376 drivers/memory/tegra/mc.c static int load_timings(struct tegra_mc *mc, struct device_node *node)
mc                383 drivers/memory/tegra/mc.c 	mc->timings = devm_kcalloc(mc->dev, child_count, sizeof(*timing),
mc                385 drivers/memory/tegra/mc.c 	if (!mc->timings)
mc                388 drivers/memory/tegra/mc.c 	mc->num_timings = child_count;
mc                391 drivers/memory/tegra/mc.c 		timing = &mc->timings[i++];
mc                393 drivers/memory/tegra/mc.c 		err = load_one_timing(mc, timing, child);
mc                403 drivers/memory/tegra/mc.c static int tegra_mc_setup_timings(struct tegra_mc *mc)
mc                411 drivers/memory/tegra/mc.c 	mc->num_timings = 0;
mc                413 drivers/memory/tegra/mc.c 	for_each_child_of_node(mc->dev->of_node, node) {
mc                419 drivers/memory/tegra/mc.c 		err = load_timings(mc, node);
mc                426 drivers/memory/tegra/mc.c 	if (mc->num_timings == 0)
mc                427 drivers/memory/tegra/mc.c 		dev_warn(mc->dev,
mc                456 drivers/memory/tegra/mc.c 	struct tegra_mc *mc = data;
mc                461 drivers/memory/tegra/mc.c 	status = mc_readl(mc, MC_INTSTATUS) & mc->soc->intmask;
mc                475 drivers/memory/tegra/mc.c 		value = mc_readl(mc, MC_ERR_STATUS);
mc                478 drivers/memory/tegra/mc.c 		if (mc->soc->num_address_bits > 32) {
mc                495 drivers/memory/tegra/mc.c 		id = value & mc->soc->client_id_mask;
mc                497 drivers/memory/tegra/mc.c 		for (i = 0; i < mc->soc->num_clients; i++) {
mc                498 drivers/memory/tegra/mc.c 			if (mc->soc->clients[i].id == id) {
mc                499 drivers/memory/tegra/mc.c 				client = mc->soc->clients[i].name;
mc                537 drivers/memory/tegra/mc.c 		value = mc_readl(mc, MC_ERR_ADR);
mc                540 drivers/memory/tegra/mc.c 		dev_err_ratelimited(mc->dev, "%s: %s%s @%pa: %s (%s%s)\n",
mc                546 drivers/memory/tegra/mc.c 	mc_writel(mc, status, MC_INTSTATUS);
mc                553 drivers/memory/tegra/mc.c 	struct tegra_mc *mc = data;
mc                558 drivers/memory/tegra/mc.c 	status = mc_readl(mc, MC_INTSTATUS) & mc->soc->intmask;
mc                573 drivers/memory/tegra/mc.c 			value = mc_readl(mc, reg);
mc                575 drivers/memory/tegra/mc.c 			id = value & mc->soc->client_id_mask;
mc                584 drivers/memory/tegra/mc.c 			value = mc_readl(mc, reg);
mc                586 drivers/memory/tegra/mc.c 			id = (value >> 1) & mc->soc->client_id_mask;
mc                595 drivers/memory/tegra/mc.c 			value = mc_readl(mc, reg);
mc                597 drivers/memory/tegra/mc.c 			id = value & mc->soc->client_id_mask;
mc                610 drivers/memory/tegra/mc.c 		client = mc->soc->clients[id].name;
mc                611 drivers/memory/tegra/mc.c 		addr = mc_readl(mc, reg + sizeof(u32));
mc                613 drivers/memory/tegra/mc.c 		dev_err_ratelimited(mc->dev, "%s: %s%s @%pa: %s (%s)\n",
mc                619 drivers/memory/tegra/mc.c 	mc_writel(mc, status, MC_INTSTATUS);
mc                627 drivers/memory/tegra/mc.c 	struct tegra_mc *mc;
mc                631 drivers/memory/tegra/mc.c 	mc = devm_kzalloc(&pdev->dev, sizeof(*mc), GFP_KERNEL);
mc                632 drivers/memory/tegra/mc.c 	if (!mc)
mc                635 drivers/memory/tegra/mc.c 	platform_set_drvdata(pdev, mc);
mc                636 drivers/memory/tegra/mc.c 	spin_lock_init(&mc->lock);
mc                637 drivers/memory/tegra/mc.c 	mc->soc = of_device_get_match_data(&pdev->dev);
mc                638 drivers/memory/tegra/mc.c 	mc->dev = &pdev->dev;
mc                641 drivers/memory/tegra/mc.c 	mc->tick = 30;
mc                644 drivers/memory/tegra/mc.c 	mc->regs = devm_ioremap_resource(&pdev->dev, res);
mc                645 drivers/memory/tegra/mc.c 	if (IS_ERR(mc->regs))
mc                646 drivers/memory/tegra/mc.c 		return PTR_ERR(mc->regs);
mc                648 drivers/memory/tegra/mc.c 	mc->clk = devm_clk_get(&pdev->dev, "mc");
mc                649 drivers/memory/tegra/mc.c 	if (IS_ERR(mc->clk)) {
mc                651 drivers/memory/tegra/mc.c 			PTR_ERR(mc->clk));
mc                652 drivers/memory/tegra/mc.c 		return PTR_ERR(mc->clk);
mc                656 drivers/memory/tegra/mc.c 	if (mc->soc == &tegra20_mc_soc) {
mc                661 drivers/memory/tegra/mc.c 		err = tegra_mc_setup_latency_allowance(mc);
mc                671 drivers/memory/tegra/mc.c 		err = tegra_mc_setup_timings(mc);
mc                679 drivers/memory/tegra/mc.c 	mc->irq = platform_get_irq(pdev, 0);
mc                680 drivers/memory/tegra/mc.c 	if (mc->irq < 0) {
mc                682 drivers/memory/tegra/mc.c 		return mc->irq;
mc                685 drivers/memory/tegra/mc.c 	WARN(!mc->soc->client_id_mask, "missing client ID mask for this SoC\n");
mc                687 drivers/memory/tegra/mc.c 	mc_writel(mc, mc->soc->intmask, MC_INTMASK);
mc                689 drivers/memory/tegra/mc.c 	err = devm_request_irq(&pdev->dev, mc->irq, isr, 0,
mc                690 drivers/memory/tegra/mc.c 			       dev_name(&pdev->dev), mc);
mc                692 drivers/memory/tegra/mc.c 		dev_err(&pdev->dev, "failed to request IRQ#%u: %d\n", mc->irq,
mc                697 drivers/memory/tegra/mc.c 	err = tegra_mc_reset_setup(mc);
mc                702 drivers/memory/tegra/mc.c 	if (IS_ENABLED(CONFIG_TEGRA_IOMMU_SMMU) && mc->soc->smmu) {
mc                703 drivers/memory/tegra/mc.c 		mc->smmu = tegra_smmu_probe(&pdev->dev, mc->soc->smmu, mc);
mc                704 drivers/memory/tegra/mc.c 		if (IS_ERR(mc->smmu)) {
mc                706 drivers/memory/tegra/mc.c 				PTR_ERR(mc->smmu));
mc                707 drivers/memory/tegra/mc.c 			mc->smmu = NULL;
mc                711 drivers/memory/tegra/mc.c 	if (IS_ENABLED(CONFIG_TEGRA_IOMMU_GART) && !mc->soc->smmu) {
mc                712 drivers/memory/tegra/mc.c 		mc->gart = tegra_gart_probe(&pdev->dev, mc);
mc                713 drivers/memory/tegra/mc.c 		if (IS_ERR(mc->gart)) {
mc                715 drivers/memory/tegra/mc.c 				PTR_ERR(mc->gart));
mc                716 drivers/memory/tegra/mc.c 			mc->gart = NULL;
mc                725 drivers/memory/tegra/mc.c 	struct tegra_mc *mc = dev_get_drvdata(dev);
mc                728 drivers/memory/tegra/mc.c 	if (IS_ENABLED(CONFIG_TEGRA_IOMMU_GART) && mc->gart) {
mc                729 drivers/memory/tegra/mc.c 		err = tegra_gart_suspend(mc->gart);
mc                739 drivers/memory/tegra/mc.c 	struct tegra_mc *mc = dev_get_drvdata(dev);
mc                742 drivers/memory/tegra/mc.c 	if (IS_ENABLED(CONFIG_TEGRA_IOMMU_GART) && mc->gart) {
mc                743 drivers/memory/tegra/mc.c 		err = tegra_gart_resume(mc->gart);
mc                 24 drivers/memory/tegra/mc.h static inline u32 mc_readl(struct tegra_mc *mc, unsigned long offset)
mc                 26 drivers/memory/tegra/mc.h 	return readl_relaxed(mc->regs + offset);
mc                 29 drivers/memory/tegra/mc.h static inline void mc_writel(struct tegra_mc *mc, u32 value,
mc                 32 drivers/memory/tegra/mc.h 	writel_relaxed(value, mc->regs + offset);
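mc_readl()/mc_writel() above are thin wrappers over the ioremapped register window, and the reset helpers in mc.c and tegra20.c build locked read-modify-writes of single bits on top of them. The sketch below models that pattern in plain userspace C; the register array, the offsets and the dropped IRQ-safe locking are assumptions of the model, not the driver.

/*
 * Hedged model of the accessor + set/clear-bit pattern above.
 * Registers are a plain array; MMIO and spinlocks are omitted.
 */
#include <stdint.h>
#include <stdio.h>

#define BIT(n)	(1u << (n))

struct tegra_mc_model {
	uint32_t regs[64];	/* stand-in for the ioremapped window */
};

struct mc_reset {
	unsigned long control;	/* register offset (word index here) */
	unsigned int bit;
};

static uint32_t mc_readl(struct tegra_mc_model *mc, unsigned long offset)
{
	return mc->regs[offset];
}

static void mc_writel(struct tegra_mc_model *mc, uint32_t value,
		      unsigned long offset)
{
	mc->regs[offset] = value;
}

/* Mirror of the block/unblock helpers: set the bit to block DMA for a
 * client, clear it to unblock. */
static void mc_block_dma(struct tegra_mc_model *mc, const struct mc_reset *rst)
{
	mc_writel(mc, mc_readl(mc, rst->control) | BIT(rst->bit), rst->control);
}

static void mc_unblock_dma(struct tegra_mc_model *mc,
			   const struct mc_reset *rst)
{
	mc_writel(mc, mc_readl(mc, rst->control) & ~BIT(rst->bit), rst->control);
}

int main(void)
{
	struct tegra_mc_model mc = { { 0 } };
	struct mc_reset rst = { .control = 4, .bit = 3 };

	mc_block_dma(&mc, &rst);
	printf("control = 0x%08x\n", (unsigned)mc_readl(&mc, rst.control));
	mc_unblock_dma(&mc, &rst);
	printf("control = 0x%08x\n", (unsigned)mc_readl(&mc, rst.control));
	return 0;
}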
mc                466 drivers/memory/tegra/tegra124-emc.c 	struct tegra_mc *mc;
mc                665 drivers/memory/tegra/tegra124-emc.c 	tegra_mc_write_emem_configuration(emc->mc, timing->rate);
mc                867 drivers/memory/tegra/tegra124-emc.c 	emc->dram_num = tegra_mc_get_emem_device_count(emc->mc);
mc               1085 drivers/memory/tegra/tegra124-emc.c 	struct platform_device *mc;
mc               1109 drivers/memory/tegra/tegra124-emc.c 	mc = of_find_device_by_node(np);
mc               1111 drivers/memory/tegra/tegra124-emc.c 	if (!mc)
mc               1114 drivers/memory/tegra/tegra124-emc.c 	emc->mc = platform_get_drvdata(mc);
mc               1115 drivers/memory/tegra/tegra124-emc.c 	if (!emc->mc)
mc                538 drivers/memory/tegra/tegra186.c 	struct tegra_mc *mc;
mc                542 drivers/memory/tegra/tegra186.c 	mc = devm_kzalloc(&pdev->dev, sizeof(*mc), GFP_KERNEL);
mc                543 drivers/memory/tegra/tegra186.c 	if (!mc)
mc                547 drivers/memory/tegra/tegra186.c 	mc->regs = devm_ioremap_resource(&pdev->dev, res);
mc                548 drivers/memory/tegra/tegra186.c 	if (IS_ERR(mc->regs))
mc                549 drivers/memory/tegra/tegra186.c 		return PTR_ERR(mc->regs);
mc                551 drivers/memory/tegra/tegra186.c 	mc->dev = &pdev->dev;
mc                557 drivers/memory/tegra/tegra186.c 		override = readl(mc->regs + client->regs.override);
mc                558 drivers/memory/tegra/tegra186.c 		security = readl(mc->regs + client->regs.security);
mc                565 drivers/memory/tegra/tegra186.c 		writel(client->sid, mc->regs + client->regs.override);
mc                567 drivers/memory/tegra/tegra186.c 		override = readl(mc->regs + client->regs.override);
mc                568 drivers/memory/tegra/tegra186.c 		security = readl(mc->regs + client->regs.security);
mc                574 drivers/memory/tegra/tegra186.c 	platform_set_drvdata(pdev, mc);
mc                198 drivers/memory/tegra/tegra20.c static int tegra20_mc_hotreset_assert(struct tegra_mc *mc,
mc                204 drivers/memory/tegra/tegra20.c 	spin_lock_irqsave(&mc->lock, flags);
mc                206 drivers/memory/tegra/tegra20.c 	value = mc_readl(mc, rst->reset);
mc                207 drivers/memory/tegra/tegra20.c 	mc_writel(mc, value & ~BIT(rst->bit), rst->reset);
mc                209 drivers/memory/tegra/tegra20.c 	spin_unlock_irqrestore(&mc->lock, flags);
mc                214 drivers/memory/tegra/tegra20.c static int tegra20_mc_hotreset_deassert(struct tegra_mc *mc,
mc                220 drivers/memory/tegra/tegra20.c 	spin_lock_irqsave(&mc->lock, flags);
mc                222 drivers/memory/tegra/tegra20.c 	value = mc_readl(mc, rst->reset);
mc                223 drivers/memory/tegra/tegra20.c 	mc_writel(mc, value | BIT(rst->bit), rst->reset);
mc                225 drivers/memory/tegra/tegra20.c 	spin_unlock_irqrestore(&mc->lock, flags);
mc                230 drivers/memory/tegra/tegra20.c static int tegra20_mc_block_dma(struct tegra_mc *mc,
mc                236 drivers/memory/tegra/tegra20.c 	spin_lock_irqsave(&mc->lock, flags);
mc                238 drivers/memory/tegra/tegra20.c 	value = mc_readl(mc, rst->control) & ~BIT(rst->bit);
mc                239 drivers/memory/tegra/tegra20.c 	mc_writel(mc, value, rst->control);
mc                241 drivers/memory/tegra/tegra20.c 	spin_unlock_irqrestore(&mc->lock, flags);
mc                246 drivers/memory/tegra/tegra20.c static bool tegra20_mc_dma_idling(struct tegra_mc *mc,
mc                249 drivers/memory/tegra/tegra20.c 	return mc_readl(mc, rst->status) == 0;
mc                252 drivers/memory/tegra/tegra20.c static int tegra20_mc_reset_status(struct tegra_mc *mc,
mc                255 drivers/memory/tegra/tegra20.c 	return (mc_readl(mc, rst->reset) & BIT(rst->bit)) == 0;
mc                258 drivers/memory/tegra/tegra20.c static int tegra20_mc_unblock_dma(struct tegra_mc *mc,
mc                264 drivers/memory/tegra/tegra20.c 	spin_lock_irqsave(&mc->lock, flags);
mc                266 drivers/memory/tegra/tegra20.c 	value = mc_readl(mc, rst->control) | BIT(rst->bit);
mc                267 drivers/memory/tegra/tegra20.c 	mc_writel(mc, value, rst->control);
mc                269 drivers/memory/tegra/tegra20.c 	spin_unlock_irqrestore(&mc->lock, flags);
mc                 51 drivers/misc/pti.c 	struct pti_masterchannel *mc;
mc                 99 drivers/misc/pti.c static void pti_write_to_aperture(struct pti_masterchannel *mc,
mc                114 drivers/misc/pti.c 	aperture = drv_data->pti_ioaddr + (mc->master << 15)
mc                115 drivers/misc/pti.c 		+ (mc->channel << 8);
mc                157 drivers/misc/pti.c static void pti_control_frame_built_and_sent(struct pti_masterchannel *mc,
mc                187 drivers/misc/pti.c 	snprintf(control_frame, CONTROL_FRAME_LEN, control_format, mc->master,
mc                188 drivers/misc/pti.c 		mc->channel, thread_name_p);
mc                206 drivers/misc/pti.c static void pti_write_full_frame_to_aperture(struct pti_masterchannel *mc,
mc                210 drivers/misc/pti.c 	pti_control_frame_built_and_sent(mc, NULL);
mc                211 drivers/misc/pti.c 	pti_write_to_aperture(mc, (u8 *)buf, len);
mc                238 drivers/misc/pti.c 	struct pti_masterchannel *mc;
mc                241 drivers/misc/pti.c 	mc = kmalloc(sizeof(struct pti_masterchannel), GFP_KERNEL);
mc                242 drivers/misc/pti.c 	if (mc == NULL)
mc                250 drivers/misc/pti.c 		kfree(mc);
mc                263 drivers/misc/pti.c 	mc->master  = base_id;
mc                264 drivers/misc/pti.c 	mc->channel = ((i & 0xf)<<3) + j;
mc                266 drivers/misc/pti.c 	pti_control_frame_built_and_sent(mc, thread_name);
mc                267 drivers/misc/pti.c 	return mc;
mc                299 drivers/misc/pti.c 	struct pti_masterchannel *mc;
mc                306 drivers/misc/pti.c 		mc = get_id(drv_data->ia_app, MAX_APP_IDS,
mc                311 drivers/misc/pti.c 		mc = get_id(drv_data->ia_os, MAX_OS_IDS,
mc                316 drivers/misc/pti.c 		mc = get_id(drv_data->ia_modem, MAX_MODEM_IDS,
mc                320 drivers/misc/pti.c 		mc = NULL;
mc                324 drivers/misc/pti.c 	return mc;
mc                336 drivers/misc/pti.c void pti_release_masterchannel(struct pti_masterchannel *mc)
mc                342 drivers/misc/pti.c 	if (mc) {
mc                343 drivers/misc/pti.c 		master = mc->master;
mc                344 drivers/misc/pti.c 		channel = mc->channel;
mc                357 drivers/misc/pti.c 		kfree(mc);
mc                375 drivers/misc/pti.c void pti_writedata(struct pti_masterchannel *mc, u8 *buf, int count)
mc                382 drivers/misc/pti.c 	if ((mc != NULL) && (buf != NULL) && (count > 0))
mc                383 drivers/misc/pti.c 		pti_write_to_aperture(mc, buf, count);
mc                465 drivers/misc/pti.c 			pti_tty_data->mc = pti_request_masterchannel(0, NULL);
mc                467 drivers/misc/pti.c 			pti_tty_data->mc = pti_request_masterchannel(2, NULL);
mc                469 drivers/misc/pti.c 		if (pti_tty_data->mc == NULL) {
mc                490 drivers/misc/pti.c 	pti_release_masterchannel(pti_tty_data->mc);
mc                512 drivers/misc/pti.c 	if ((pti_tty_data != NULL) && (pti_tty_data->mc != NULL)) {
mc                513 drivers/misc/pti.c 		pti_write_to_aperture(pti_tty_data->mc, (u8 *)buf, len);
mc                548 drivers/misc/pti.c 	struct pti_masterchannel *mc;
mc                556 drivers/misc/pti.c 	mc = pti_request_masterchannel(0, NULL);
mc                557 drivers/misc/pti.c 	if (mc == NULL)
mc                559 drivers/misc/pti.c 	filp->private_data = mc;
mc                604 drivers/misc/pti.c 	struct pti_masterchannel *mc;
mc                611 drivers/misc/pti.c 	mc = filp->private_data;
mc                631 drivers/misc/pti.c 		pti_write_to_aperture(mc, kbuf, size);
mc                672 drivers/misc/pti.c 	static struct pti_masterchannel mc = {.master  = CONSOLE_ID,
mc                675 drivers/misc/pti.c 	mc.channel = pti_console_channel;
mc                678 drivers/misc/pti.c 	pti_write_full_frame_to_aperture(&mc, buf, len);
mc                343 drivers/net/can/usb/peak_usb/pcan_usb.c static int pcan_usb_update_ts(struct pcan_usb_msg_context *mc)
mc                347 drivers/net/can/usb/peak_usb/pcan_usb.c 	if ((mc->ptr+2) > mc->end)
mc                350 drivers/net/can/usb/peak_usb/pcan_usb.c 	memcpy(&tmp16, mc->ptr, 2);
mc                352 drivers/net/can/usb/peak_usb/pcan_usb.c 	mc->ts16 = le16_to_cpu(tmp16);
mc                354 drivers/net/can/usb/peak_usb/pcan_usb.c 	if (mc->rec_idx > 0)
mc                355 drivers/net/can/usb/peak_usb/pcan_usb.c 		peak_usb_update_ts_now(&mc->pdev->time_ref, mc->ts16);
mc                357 drivers/net/can/usb/peak_usb/pcan_usb.c 		peak_usb_set_ts_now(&mc->pdev->time_ref, mc->ts16);
mc                365 drivers/net/can/usb/peak_usb/pcan_usb.c static int pcan_usb_decode_ts(struct pcan_usb_msg_context *mc, u8 first_packet)
mc                371 drivers/net/can/usb/peak_usb/pcan_usb.c 		if ((mc->ptr + 2) > mc->end)
mc                374 drivers/net/can/usb/peak_usb/pcan_usb.c 		memcpy(&tmp16, mc->ptr, 2);
mc                375 drivers/net/can/usb/peak_usb/pcan_usb.c 		mc->ptr += 2;
mc                377 drivers/net/can/usb/peak_usb/pcan_usb.c 		mc->ts16 = le16_to_cpu(tmp16);
mc                378 drivers/net/can/usb/peak_usb/pcan_usb.c 		mc->prev_ts8 = mc->ts16 & 0x00ff;
mc                382 drivers/net/can/usb/peak_usb/pcan_usb.c 		if ((mc->ptr + 1) > mc->end)
mc                385 drivers/net/can/usb/peak_usb/pcan_usb.c 		ts8 = *mc->ptr++;
mc                387 drivers/net/can/usb/peak_usb/pcan_usb.c 		if (ts8 < mc->prev_ts8)
mc                388 drivers/net/can/usb/peak_usb/pcan_usb.c 			mc->ts16 += 0x100;
mc                390 drivers/net/can/usb/peak_usb/pcan_usb.c 		mc->ts16 &= 0xff00;
mc                391 drivers/net/can/usb/peak_usb/pcan_usb.c 		mc->ts16 |= ts8;
mc                392 drivers/net/can/usb/peak_usb/pcan_usb.c 		mc->prev_ts8 = ts8;
mc                398 drivers/net/can/usb/peak_usb/pcan_usb.c static int pcan_usb_decode_error(struct pcan_usb_msg_context *mc, u8 n,
mc                407 drivers/net/can/usb/peak_usb/pcan_usb.c 		if (!mc->pdev->time_ref.tick_count)
mc                410 drivers/net/can/usb/peak_usb/pcan_usb.c 	new_state = mc->pdev->dev.can.state;
mc                412 drivers/net/can/usb/peak_usb/pcan_usb.c 	switch (mc->pdev->dev.can.state) {
mc                475 drivers/net/can/usb/peak_usb/pcan_usb.c 	if (mc->pdev->dev.can.state == new_state)
mc                479 drivers/net/can/usb/peak_usb/pcan_usb.c 	skb = alloc_can_err_skb(mc->netdev, &cf);
mc                486 drivers/net/can/usb/peak_usb/pcan_usb.c 		mc->pdev->dev.can.can_stats.bus_off++;
mc                487 drivers/net/can/usb/peak_usb/pcan_usb.c 		can_bus_off(mc->netdev);
mc                494 drivers/net/can/usb/peak_usb/pcan_usb.c 		mc->pdev->dev.can.can_stats.error_passive++;
mc                501 drivers/net/can/usb/peak_usb/pcan_usb.c 		mc->pdev->dev.can.can_stats.error_warning++;
mc                513 drivers/net/can/usb/peak_usb/pcan_usb.c 		mc->netdev->stats.rx_over_errors++;
mc                514 drivers/net/can/usb/peak_usb/pcan_usb.c 		mc->netdev->stats.rx_errors++;
mc                516 drivers/net/can/usb/peak_usb/pcan_usb.c 		new_state = mc->pdev->dev.can.state;
mc                520 drivers/net/can/usb/peak_usb/pcan_usb.c 	mc->pdev->dev.can.state = new_state;
mc                525 drivers/net/can/usb/peak_usb/pcan_usb.c 		peak_usb_get_ts_time(&mc->pdev->time_ref, mc->ts16,
mc                529 drivers/net/can/usb/peak_usb/pcan_usb.c 	mc->netdev->stats.rx_packets++;
mc                530 drivers/net/can/usb/peak_usb/pcan_usb.c 	mc->netdev->stats.rx_bytes += cf->can_dlc;
mc                539 drivers/net/can/usb/peak_usb/pcan_usb.c static int pcan_usb_decode_status(struct pcan_usb_msg_context *mc,
mc                547 drivers/net/can/usb/peak_usb/pcan_usb.c 	if ((mc->ptr + 2) > mc->end)
mc                550 drivers/net/can/usb/peak_usb/pcan_usb.c 	f = mc->ptr[PCAN_USB_CMD_FUNC];
mc                551 drivers/net/can/usb/peak_usb/pcan_usb.c 	n = mc->ptr[PCAN_USB_CMD_NUM];
mc                552 drivers/net/can/usb/peak_usb/pcan_usb.c 	mc->ptr += PCAN_USB_CMD_ARGS;
mc                555 drivers/net/can/usb/peak_usb/pcan_usb.c 		int err = pcan_usb_decode_ts(mc, !mc->rec_ts_idx);
mc                563 drivers/net/can/usb/peak_usb/pcan_usb.c 		mc->rec_ts_idx++;
mc                568 drivers/net/can/usb/peak_usb/pcan_usb.c 		err = pcan_usb_decode_error(mc, n, status_len);
mc                585 drivers/net/can/usb/peak_usb/pcan_usb.c 		if (pcan_usb_update_ts(mc))
mc                592 drivers/net/can/usb/peak_usb/pcan_usb.c 			netdev_dbg(mc->netdev, "device Tx queue full)\n");
mc                595 drivers/net/can/usb/peak_usb/pcan_usb.c 		netdev_err(mc->netdev, "unexpected function %u\n", f);
mc                599 drivers/net/can/usb/peak_usb/pcan_usb.c 	if ((mc->ptr + rec_len) > mc->end)
mc                602 drivers/net/can/usb/peak_usb/pcan_usb.c 	mc->ptr += rec_len;
mc                610 drivers/net/can/usb/peak_usb/pcan_usb.c static int pcan_usb_decode_data(struct pcan_usb_msg_context *mc, u8 status_len)
mc                617 drivers/net/can/usb/peak_usb/pcan_usb.c 	skb = alloc_can_skb(mc->netdev, &cf);
mc                624 drivers/net/can/usb/peak_usb/pcan_usb.c 		if ((mc->ptr + 4) > mc->end)
mc                627 drivers/net/can/usb/peak_usb/pcan_usb.c 		memcpy(&tmp32, mc->ptr, 4);
mc                628 drivers/net/can/usb/peak_usb/pcan_usb.c 		mc->ptr += 4;
mc                634 drivers/net/can/usb/peak_usb/pcan_usb.c 		if ((mc->ptr + 2) > mc->end)
mc                637 drivers/net/can/usb/peak_usb/pcan_usb.c 		memcpy(&tmp16, mc->ptr, 2);
mc                638 drivers/net/can/usb/peak_usb/pcan_usb.c 		mc->ptr += 2;
mc                646 drivers/net/can/usb/peak_usb/pcan_usb.c 	if (pcan_usb_decode_ts(mc, !mc->rec_ts_idx))
mc                650 drivers/net/can/usb/peak_usb/pcan_usb.c 	mc->rec_ts_idx++;
mc                657 drivers/net/can/usb/peak_usb/pcan_usb.c 		if ((mc->ptr + rec_len) > mc->end)
mc                660 drivers/net/can/usb/peak_usb/pcan_usb.c 		memcpy(cf->data, mc->ptr, cf->can_dlc);
mc                661 drivers/net/can/usb/peak_usb/pcan_usb.c 		mc->ptr += rec_len;
mc                666 drivers/net/can/usb/peak_usb/pcan_usb.c 	peak_usb_get_ts_time(&mc->pdev->time_ref, mc->ts16, &hwts->hwtstamp);
mc                669 drivers/net/can/usb/peak_usb/pcan_usb.c 	mc->netdev->stats.rx_packets++;
mc                670 drivers/net/can/usb/peak_usb/pcan_usb.c 	mc->netdev->stats.rx_bytes += cf->can_dlc;
mc                686 drivers/net/can/usb/peak_usb/pcan_usb.c 	struct pcan_usb_msg_context mc = {
mc                695 drivers/net/can/usb/peak_usb/pcan_usb.c 	for (err = 0; mc.rec_idx < mc.rec_cnt && !err; mc.rec_idx++) {
mc                696 drivers/net/can/usb/peak_usb/pcan_usb.c 		u8 sl = *mc.ptr++;
mc                700 drivers/net/can/usb/peak_usb/pcan_usb.c 			err = pcan_usb_decode_status(&mc, sl);
mc                703 drivers/net/can/usb/peak_usb/pcan_usb.c 			err = pcan_usb_decode_data(&mc, sl);
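pcan_usb_decode_ts() above keeps a full 16-bit timestamp from the first record of a message and widens the 8-bit timestamps of later records, bumping the high byte whenever the low byte wraps. A self-contained C model of just that widening step follows; the cut-down context struct is a stand-in for pcan_usb_msg_context.

/*
 * Standalone model of the 8-bit -> 16-bit timestamp widening above.
 */
#include <stdint.h>
#include <stdio.h>

struct ts_ctx {
	uint16_t ts16;
	uint8_t prev_ts8;
};

static void ts_first(struct ts_ctx *c, uint16_t ts16)
{
	c->ts16 = ts16;
	c->prev_ts8 = ts16 & 0x00ff;
}

static void ts_next(struct ts_ctx *c, uint8_t ts8)
{
	if (ts8 < c->prev_ts8)		/* low byte wrapped */
		c->ts16 += 0x100;

	c->ts16 = (c->ts16 & 0xff00) | ts8;
	c->prev_ts8 = ts8;
}

int main(void)
{
	struct ts_ctx c;
	static const uint8_t lows[] = { 0xf0, 0xfe, 0x03, 0x10 };

	ts_first(&c, 0x12e0);
	for (unsigned i = 0; i < sizeof(lows); i++) {
		ts_next(&c, lows[i]);
		printf("ts16 = 0x%04x\n", c.ts16);
	}
	return 0;
}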
mc               1802 drivers/net/dsa/b53/b53_common.c 	u16 uc, mc;
mc               1811 drivers/net/dsa/b53/b53_common.c 	b53_read16(dev, B53_CTRL_PAGE, B53_MC_FLOOD_MASK, &mc);
mc               1813 drivers/net/dsa/b53/b53_common.c 		mc |= BIT(port);
mc               1815 drivers/net/dsa/b53/b53_common.c 		mc &= ~BIT(port);
mc               1816 drivers/net/dsa/b53/b53_common.c 	b53_write16(dev, B53_CTRL_PAGE, B53_MC_FLOOD_MASK, mc);
mc               1818 drivers/net/dsa/b53/b53_common.c 	b53_read16(dev, B53_CTRL_PAGE, B53_IPMC_FLOOD_MASK, &mc);
mc               1820 drivers/net/dsa/b53/b53_common.c 		mc |= BIT(port);
mc               1822 drivers/net/dsa/b53/b53_common.c 		mc &= ~BIT(port);
mc               1823 drivers/net/dsa/b53/b53_common.c 	b53_write16(dev, B53_CTRL_PAGE, B53_IPMC_FLOOD_MASK, mc);
mc                268 drivers/net/eql.c static int eql_g_master_cfg(struct net_device *dev, master_config_t __user *mc);
mc                269 drivers/net/eql.c static int eql_s_master_cfg(struct net_device *dev, master_config_t __user *mc);
mc                546 drivers/net/eql.c 	master_config_t mc;
mc                548 drivers/net/eql.c 	memset(&mc, 0, sizeof(master_config_t));
mc                552 drivers/net/eql.c 		mc.max_slaves = eql->max_slaves;
mc                553 drivers/net/eql.c 		mc.min_slaves = eql->min_slaves;
mc                554 drivers/net/eql.c 		if (copy_to_user(mcp, &mc, sizeof (master_config_t)))
mc                564 drivers/net/eql.c 	master_config_t mc;
mc                566 drivers/net/eql.c 	if (copy_from_user(&mc, mcp, sizeof (master_config_t)))
mc                571 drivers/net/eql.c 		eql->max_slaves = mc.max_slaves;
mc                572 drivers/net/eql.c 		eql->min_slaves = mc.min_slaves;
mc                561 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.c 	struct bnx2x_mcast_list_elem *mc = NULL;
mc                575 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.c 		mc = kcalloc(mc_num, sizeof(struct bnx2x_mcast_list_elem),
mc                577 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.c 		if (!mc) {
mc                586 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.c 			mc[i].mac = mcasts[i];
mc                587 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.c 			list_add_tail(&mc[i].link,
mc                603 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.c 	kfree(mc);
mc               2119 drivers/net/ethernet/brocade/bna/bfa_ioc.c 	int	mc;
mc               2122 drivers/net/ethernet/brocade/bna/bfa_ioc.c 	for (mc = 0; mc < BFI_MC_MAX; mc++) {
mc               2123 drivers/net/ethernet/brocade/bna/bfa_ioc.c 		mod->mbhdlr[mc].cbfn = NULL;
mc               2124 drivers/net/ethernet/brocade/bna/bfa_ioc.c 		mod->mbhdlr[mc].cbarg = ioc->bfa;
mc               2627 drivers/net/ethernet/brocade/bna/bfa_ioc.c bfa_nw_ioc_mbox_regisr(struct bfa_ioc *ioc, enum bfi_mclass mc,
mc               2632 drivers/net/ethernet/brocade/bna/bfa_ioc.c 	mod->mbhdlr[mc].cbfn	= cbfn;
mc               2633 drivers/net/ethernet/brocade/bna/bfa_ioc.c 	mod->mbhdlr[mc].cbarg = cbarg;
mc               2685 drivers/net/ethernet/brocade/bna/bfa_ioc.c 	int				mc;
mc               2691 drivers/net/ethernet/brocade/bna/bfa_ioc.c 		mc = m.mh.msg_class;
mc               2692 drivers/net/ethernet/brocade/bna/bfa_ioc.c 		if (mc == BFI_MC_IOC) {
mc               2697 drivers/net/ethernet/brocade/bna/bfa_ioc.c 		if ((mc >= BFI_MC_MAX) || (mod->mbhdlr[mc].cbfn == NULL))
mc               2700 drivers/net/ethernet/brocade/bna/bfa_ioc.c 		mod->mbhdlr[mc].cbfn(mod->mbhdlr[mc].cbarg, &m);
mc                248 drivers/net/ethernet/brocade/bna/bfa_ioc.h void bfa_nw_ioc_mbox_regisr(struct bfa_ioc *ioc, enum bfi_mclass mc,
mc                465 drivers/net/ethernet/brocade/bna/bfa_msgq.c 	int mc;
mc                475 drivers/net/ethernet/brocade/bna/bfa_msgq.c 		mc = msghdr->msg_class;
mc                478 drivers/net/ethernet/brocade/bna/bfa_msgq.c 		if ((mc >= BFI_MC_MAX) || (rspq->rsphdlr[mc].cbfn == NULL))
mc                481 drivers/net/ethernet/brocade/bna/bfa_msgq.c 		(rspq->rsphdlr[mc].cbfn)(rspq->rsphdlr[mc].cbarg, msghdr);
mc                616 drivers/net/ethernet/brocade/bna/bfa_msgq.c bfa_msgq_regisr(struct bfa_msgq *msgq, enum bfi_mclass mc,
mc                619 drivers/net/ethernet/brocade/bna/bfa_msgq.c 	msgq->rspq.rsphdlr[mc].cbfn	= cbfn;
mc                620 drivers/net/ethernet/brocade/bna/bfa_msgq.c 	msgq->rspq.rsphdlr[mc].cbarg	= cbarg;
mc                117 drivers/net/ethernet/brocade/bna/bfa_msgq.h void bfa_msgq_regisr(struct bfa_msgq *msgq, enum bfi_mclass mc,
mc               1976 drivers/net/ethernet/cavium/liquidio/lio_main.c 	u64 *mc;
mc               1993 drivers/net/ethernet/cavium/liquidio/lio_main.c 	mc = &nctrl.udd[0];
mc               1995 drivers/net/ethernet/cavium/liquidio/lio_main.c 		*mc = 0;
mc               1996 drivers/net/ethernet/cavium/liquidio/lio_main.c 		memcpy(((u8 *)mc) + 2, ha->addr, ETH_ALEN);
mc               1999 drivers/net/ethernet/cavium/liquidio/lio_main.c 		if (++mc > &nctrl.udd[mc_count])
mc               1075 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	u64 *mc;
mc               1090 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	mc = &nctrl.udd[0];
mc               1092 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		*mc = 0;
mc               1093 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		ether_addr_copy(((u8 *)mc) + 2, ha->addr);
mc               1095 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		if (++mc > &nctrl.udd[mc_count])
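Both liquidio set-multicast paths above pack each 6-byte MAC into bytes 2..7 of a zeroed 64-bit slot in the command's user-defined data area. A tiny hedged sketch of that packing; the udd array and names here are placeholders, and what matters is the byte layout, not the printed integer value.

/*
 * Hedged sketch of the MAC-in-u64 packing visible in the liquidio
 * lines above.  ETH_ALEN is just 6; the printed value depends on host
 * endianness, the byte placement at offset 2 does not.
 */
#include <stdint.h>
#include <string.h>
#include <stdio.h>

#define ETH_ALEN 6

static void pack_mc_addr(uint64_t *slot, const uint8_t mac[ETH_ALEN])
{
	*slot = 0;
	memcpy((uint8_t *)slot + 2, mac, ETH_ALEN);
}

int main(void)
{
	uint64_t udd[4];
	const uint8_t mac[ETH_ALEN] = { 0x01, 0x00, 0x5e, 0x00, 0x00, 0xfb };

	pack_mc_addr(&udd[0], mac);
	printf("udd[0] = 0x%016llx\n", (unsigned long long)udd[0]);
	return 0;
}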
mc                267 drivers/net/ethernet/cavium/thunder/nic.h 	u64              mc[];
mc                273 drivers/net/ethernet/cavium/thunder/nic.h 	struct xcast_addr_list *mc;
mc               2021 drivers/net/ethernet/cavium/thunder/nicvf_main.c 			mbx.xcast.mac = mc_addrs->mc[idx];
mc               2042 drivers/net/ethernet/cavium/thunder/nicvf_main.c 	struct xcast_addr_list *mc;
mc               2052 drivers/net/ethernet/cavium/thunder/nicvf_main.c 	mc = vf_work->mc;
mc               2053 drivers/net/ethernet/cavium/thunder/nicvf_main.c 	vf_work->mc = NULL;
mc               2056 drivers/net/ethernet/cavium/thunder/nicvf_main.c 	__nicvf_set_rx_mode_task(mode, mc, nic);
mc               2079 drivers/net/ethernet/cavium/thunder/nicvf_main.c 							   mc[netdev_mc_count(netdev)]),
mc               2084 drivers/net/ethernet/cavium/thunder/nicvf_main.c 				netdev_hw_addr_list_for_each(ha, &netdev->mc) {
mc               2085 drivers/net/ethernet/cavium/thunder/nicvf_main.c 					mc_list->mc[mc_list->count] =
mc               2093 drivers/net/ethernet/cavium/thunder/nicvf_main.c 	kfree(nic->rx_mode_work.mc);
mc               2094 drivers/net/ethernet/cavium/thunder/nicvf_main.c 	nic->rx_mode_work.mc = mc_list;
mc                778 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 	u8 mc, found = 0;
mc                782 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 	rc = cudbg_meminfo_get_mem_index(padap, meminfo, mem_type, &mc);
mc                808 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 			if (meminfo->mem[i].base < meminfo->avail[mc].base &&
mc                809 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 			    meminfo->mem[i].limit < meminfo->avail[mc].base)
mc                812 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 			if (meminfo->mem[i].base > meminfo->avail[mc].limit)
mc                513 drivers/net/ethernet/dec/tulip/de4x5.c     u_int mc;               /* Media Capabilities                        */
mc                523 drivers/net/ethernet/dec/tulip/de4x5.c     u_char mc;              /* Media Code                                */
mc               4632 drivers/net/ethernet/dec/tulip/de4x5.c 	lp->phy[lp->active].mc  = get_unaligned_le16(p); p += 2;
mc               4713 drivers/net/ethernet/dec/tulip/de4x5.c 	lp->phy[lp->active].mc  = get_unaligned_le16(p); p += 2;
mc               4052 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 	list = &ndev->mc;
mc               4078 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c 	list = &netdev->mc;
mc                337 drivers/net/ethernet/ibm/emac/mal.c 		struct mal_commac *mc = list_entry(l, struct mal_commac, list);
mc                338 drivers/net/ethernet/ibm/emac/mal.c 		if (deir & mc->rx_chan_mask) {
mc                339 drivers/net/ethernet/ibm/emac/mal.c 			set_bit(MAL_COMMAC_RX_STOPPED, &mc->flags);
mc                340 drivers/net/ethernet/ibm/emac/mal.c 			mc->ops->rxde(mc->dev);
mc                403 drivers/net/ethernet/ibm/emac/mal.c 		struct mal_commac *mc =
mc                405 drivers/net/ethernet/ibm/emac/mal.c 		mc->ops->poll_tx(mc->dev);
mc                414 drivers/net/ethernet/ibm/emac/mal.c 		struct mal_commac *mc =
mc                417 drivers/net/ethernet/ibm/emac/mal.c 		if (unlikely(test_bit(MAL_COMMAC_POLL_DISABLED, &mc->flags)))
mc                419 drivers/net/ethernet/ibm/emac/mal.c 		n = mc->ops->poll_rx(mc->dev, budget - received);
mc                436 drivers/net/ethernet/ibm/emac/mal.c 		struct mal_commac *mc =
mc                438 drivers/net/ethernet/ibm/emac/mal.c 		if (unlikely(test_bit(MAL_COMMAC_POLL_DISABLED, &mc->flags)))
mc                440 drivers/net/ethernet/ibm/emac/mal.c 		if (unlikely(mc->ops->peek_rx(mc->dev) ||
mc                441 drivers/net/ethernet/ibm/emac/mal.c 			     test_bit(MAL_COMMAC_RX_STOPPED, &mc->flags))) {
mc                450 drivers/net/ethernet/ibm/emac/mal.c 		mc->ops->poll_tx(mc->dev);
mc               3815 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 	    mvpp2_prs_mac_da_accept_list(port, &dev->mc))
mc                226 drivers/net/ethernet/mellanox/mlx5/core/en_arfs.c 	u8 *mc;
mc                237 drivers/net/ethernet/mellanox/mlx5/core/en_arfs.c 	mc = MLX5_ADDR_OF(create_flow_group_in, in, match_criteria);
mc                238 drivers/net/ethernet/mellanox/mlx5/core/en_arfs.c 	outer_headers_c = MLX5_ADDR_OF(fte_match_param, mc,
mc                915 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 	u8 *mc;
mc                928 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 	mc = MLX5_ADDR_OF(create_flow_group_in, in, match_criteria);
mc                929 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 	MLX5_SET_TO_ONES(fte_match_param, mc, outer_headers.ip_protocol);
mc                931 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 		MLX5_SET_TO_ONES(fte_match_param, mc, outer_headers.ip_version);
mc                933 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 		MLX5_SET_TO_ONES(fte_match_param, mc, outer_headers.ethertype);
mc                944 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 	MLX5_SET(fte_match_param, mc, outer_headers.ip_protocol, 0);
mc               1056 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 	u8 *mc;
mc               1068 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 	mc = MLX5_ADDR_OF(create_flow_group_in, in, match_criteria);
mc               1069 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 	MLX5_SET_TO_ONES(fte_match_param, mc, inner_headers.ip_protocol);
mc               1070 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 	MLX5_SET_TO_ONES(fte_match_param, mc, inner_headers.ip_version);
mc               1081 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 	MLX5_SET(fte_match_param, mc, inner_headers.ip_protocol, 0);
mc               1290 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 	u8 *mc;
mc               1301 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 	mc = MLX5_ADDR_OF(create_flow_group_in, in, match_criteria);
mc               1302 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 	mc_dmac = MLX5_ADDR_OF(fte_match_param, mc,
mc               1400 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 	u8 *mc = MLX5_ADDR_OF(create_flow_group_in, in, match_criteria);
mc               1404 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 	MLX5_SET_TO_ONES(fte_match_param, mc, outer_headers.cvlan_tag);
mc               1405 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 	MLX5_SET_TO_ONES(fte_match_param, mc, outer_headers.first_vid);
mc               1416 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 	MLX5_SET_TO_ONES(fte_match_param, mc, outer_headers.svlan_tag);
mc               1417 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 	MLX5_SET_TO_ONES(fte_match_param, mc, outer_headers.first_vid);
mc               1428 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 	MLX5_SET_TO_ONES(fte_match_param, mc, outer_headers.cvlan_tag);
mc               1439 drivers/net/ethernet/mellanox/mlx5/core/en_fs.c 	MLX5_SET_TO_ONES(fte_match_param, mc, outer_headers.svlan_tag);
mc               3829 drivers/net/ethernet/mellanox/mlxsw/reg.h MLXSW_ITEM32(reg, qtctm, mc, 0x04, 0, 1);
mc               3832 drivers/net/ethernet/mellanox/mlxsw/reg.h mlxsw_reg_qtctm_pack(char *payload, u8 local_port, bool mc)
mc               3836 drivers/net/ethernet/mellanox/mlxsw/reg.h 	mlxsw_reg_qtctm_mc_set(payload, mc);
mc                831 drivers/net/ethernet/mellanox/mlxsw/spectrum_buffers.c 		const struct mlxsw_sp_sb_mm *mc;
mc                834 drivers/net/ethernet/mellanox/mlxsw/spectrum_buffers.c 		mc = &mlxsw_sp->sb_vals->mms[i];
mc                835 drivers/net/ethernet/mellanox/mlxsw/spectrum_buffers.c 		des = &mlxsw_sp->sb_vals->pool_dess[mc->pool_index];
mc                839 drivers/net/ethernet/mellanox/mlxsw/spectrum_buffers.c 		min_buff = mlxsw_sp_bytes_cells(mlxsw_sp, mc->min_buff);
mc                840 drivers/net/ethernet/mellanox/mlxsw/spectrum_buffers.c 		mlxsw_reg_sbmm_pack(sbmm_pl, i, min_buff, mc->max_buff,
mc               1286 drivers/net/ethernet/mscc/ocelot.c static void ocelot_port_attr_mc_set(struct ocelot_port *port, bool mc)
mc               1292 drivers/net/ethernet/mscc/ocelot.c 	if (mc)
mc               1372 drivers/net/ethernet/mscc/ocelot.c 	struct ocelot_multicast *mc;
mc               1374 drivers/net/ethernet/mscc/ocelot.c 	list_for_each_entry(mc, &ocelot->multicast, list) {
mc               1375 drivers/net/ethernet/mscc/ocelot.c 		if (ether_addr_equal(mc->addr, addr) && mc->vid == vid)
mc               1376 drivers/net/ethernet/mscc/ocelot.c 			return mc;
mc               1388 drivers/net/ethernet/mscc/ocelot.c 	struct ocelot_multicast *mc;
mc               1396 drivers/net/ethernet/mscc/ocelot.c 	mc = ocelot_multicast_get(ocelot, mdb->addr, vid);
mc               1397 drivers/net/ethernet/mscc/ocelot.c 	if (!mc) {
mc               1398 drivers/net/ethernet/mscc/ocelot.c 		mc = devm_kzalloc(ocelot->dev, sizeof(*mc), GFP_KERNEL);
mc               1399 drivers/net/ethernet/mscc/ocelot.c 		if (!mc)
mc               1402 drivers/net/ethernet/mscc/ocelot.c 		memcpy(mc->addr, mdb->addr, ETH_ALEN);
mc               1403 drivers/net/ethernet/mscc/ocelot.c 		mc->vid = vid;
mc               1405 drivers/net/ethernet/mscc/ocelot.c 		list_add_tail(&mc->list, &ocelot->multicast);
mc               1409 drivers/net/ethernet/mscc/ocelot.c 	memcpy(addr, mc->addr, ETH_ALEN);
mc               1413 drivers/net/ethernet/mscc/ocelot.c 		addr[2] = mc->ports << 0;
mc               1414 drivers/net/ethernet/mscc/ocelot.c 		addr[1] = mc->ports << 8;
mc               1418 drivers/net/ethernet/mscc/ocelot.c 	mc->ports |= BIT(port->chip_port);
mc               1419 drivers/net/ethernet/mscc/ocelot.c 	addr[2] = mc->ports << 0;
mc               1420 drivers/net/ethernet/mscc/ocelot.c 	addr[1] = mc->ports << 8;
mc               1430 drivers/net/ethernet/mscc/ocelot.c 	struct ocelot_multicast *mc;
mc               1437 drivers/net/ethernet/mscc/ocelot.c 	mc = ocelot_multicast_get(ocelot, mdb->addr, vid);
mc               1438 drivers/net/ethernet/mscc/ocelot.c 	if (!mc)
mc               1441 drivers/net/ethernet/mscc/ocelot.c 	memcpy(addr, mc->addr, ETH_ALEN);
mc               1442 drivers/net/ethernet/mscc/ocelot.c 	addr[2] = mc->ports << 0;
mc               1443 drivers/net/ethernet/mscc/ocelot.c 	addr[1] = mc->ports << 8;
mc               1447 drivers/net/ethernet/mscc/ocelot.c 	mc->ports &= ~BIT(port->chip_port);
mc               1448 drivers/net/ethernet/mscc/ocelot.c 	if (!mc->ports) {
mc               1449 drivers/net/ethernet/mscc/ocelot.c 		list_del(&mc->list);
mc               1450 drivers/net/ethernet/mscc/ocelot.c 		devm_kfree(ocelot->dev, mc);
mc               1454 drivers/net/ethernet/mscc/ocelot.c 	addr[2] = mc->ports << 0;
mc               1455 drivers/net/ethernet/mscc/ocelot.c 	addr[1] = mc->ports << 8;
mc               1100 drivers/net/ethernet/nxp/lpc_eth.c 	struct netdev_hw_addr_list *mcptr = &ndev->mc;
mc                 82 drivers/net/ethernet/sfc/ef10.c 	u16 mc[EFX_EF10_FILTER_DEV_MC_MAX];
mc               5200 drivers/net/ethernet/sfc/ef10.c 		efx_ef10_filter_mark_one_old(efx, &vlan->mc[i]);
mc               5246 drivers/net/ethernet/sfc/ef10.c 	struct netdev_hw_addr *mc;
mc               5253 drivers/net/ethernet/sfc/ef10.c 	netdev_for_each_mc_addr(mc, net_dev) {
mc               5259 drivers/net/ethernet/sfc/ef10.c 		ether_addr_copy(table->dev_mc_list[i].addr, mc->addr);
mc               5283 drivers/net/ethernet/sfc/ef10.c 		ids = vlan->mc;
mc               5787 drivers/net/ethernet/sfc/ef10.c 	for (i = 0; i < ARRAY_SIZE(vlan->mc); i++)
mc               5788 drivers/net/ethernet/sfc/ef10.c 		vlan->mc[i] = EFX_EF10_FILTER_ID_INVALID;
mc               5814 drivers/net/ethernet/sfc/ef10.c 	for (i = 0; i < ARRAY_SIZE(vlan->mc); i++)
mc               5816 drivers/net/ethernet/sfc/ef10.c 					      vlan->mc[i]);
mc                148 drivers/net/ethernet/ti/cpsw.c 			__hw_addr_ref_unsync_dev(&ndev->mc, ndev, NULL);
mc                331 drivers/net/ethernet/ti/cpsw.c 	__hw_addr_ref_sync_dev(&ndev->mc, ndev, cpsw_add_mc_addr,
mc               1772 drivers/net/ethernet/ti/cpsw.c 	__hw_addr_ref_unsync_dev(&ndev->mc, ndev, cpsw_purge_all_mc);
mc               1076 drivers/net/fddi/skfp/fplustm.c 	for (i = 0, tb = smc->hw.fp.mc.table ; i < FPMAX_MULTICAST ; i++, tb++){
mc               1107 drivers/net/fddi/skfp/fplustm.c 	for (i = 0, tb = smc->hw.fp.mc.table ; i < FPMAX_MULTICAST ; i++, tb++){
mc               1230 drivers/net/fddi/skfp/fplustm.c 	for (i = 0, tb = smc->hw.fp.mc.table; i < FPMAX_MULTICAST; i++, tb++) {
mc                223 drivers/net/fddi/skfp/h/fplustm.h 	} mc ;
mc                759 drivers/net/fddi/skfp/h/smt.h 	struct smt_p_mac_counter mc ;		/* MAC counter */
mc                 89 drivers/net/fddi/skfp/smt.c static void smt_fill_mac_counter(struct s_smc *smc, struct smt_p_mac_counter *mc);
mc               1075 drivers/net/fddi/skfp/smt.c 	smt_fill_mac_counter(smc,&sif->mc) ; /* set mac counter field */
mc               1467 drivers/net/fddi/skfp/smt.c static void smt_fill_mac_counter(struct s_smc *smc, struct smt_p_mac_counter *mc)
mc               1469 drivers/net/fddi/skfp/smt.c 	SMTSETPARA(mc,SMT_P_MAC_COUNTER) ;
mc               1470 drivers/net/fddi/skfp/smt.c 	mc->mc_mib_index = INDEX_MAC ;
mc               1471 drivers/net/fddi/skfp/smt.c 	mc->mc_index = mac_index(smc,1) ;
mc               1472 drivers/net/fddi/skfp/smt.c 	mc->mc_receive_ct = smc->mib.m[MAC0].fddiMACCopied_Ct ;
mc               1473 drivers/net/fddi/skfp/smt.c 	mc->mc_transmit_ct =  smc->mib.m[MAC0].fddiMACTransmit_Ct ;
mc                678 drivers/net/wireless/ath/wil6210/txrx.c 	int mc = wil_rxdesc_mcast(d);
mc                680 drivers/net/wireless/ath/wil6210/txrx.c 	struct wil_tid_crypto_rx *c = mc ? &s->group_crypto_rx :
mc                688 drivers/net/wireless/ath/wil6210/txrx.c 				    cid, tid, mc, key_id);
mc                695 drivers/net/wireless/ath/wil6210/txrx.c 				    cid, tid, mc, key_id, pn, cc->pn);
mc                545 drivers/net/wireless/ath/wil6210/txrx_edma.c 	int cid, tid, key_id, mc;
mc                560 drivers/net/wireless/ath/wil6210/txrx_edma.c 	mc = wil_rx_status_get_mcast(st);
mc                562 drivers/net/wireless/ath/wil6210/txrx_edma.c 	c = mc ? &s->group_crypto_rx : &s->tid_crypto_rx[tid];
mc                569 drivers/net/wireless/ath/wil6210/txrx_edma.c 				    cid, tid, mc, key_id);
mc                576 drivers/net/wireless/ath/wil6210/txrx_edma.c 				    cid, tid, mc, key_id, pn, cc->pn);
mc                335 drivers/net/wireless/broadcom/brcm80211/brcmfmac/bcmsdh.c 			  struct mmc_command *mc, int sg_cnt, int req_sz,
mc                344 drivers/net/wireless/broadcom/brcm80211/brcmfmac/bcmsdh.c 	mc->arg |= (*addr & 0x1FFFF) << 9;	/* address */
mc                345 drivers/net/wireless/broadcom/brcm80211/brcmfmac/bcmsdh.c 	mc->arg |= md->blocks & 0x1FF;	/* block count */
mc                353 drivers/net/wireless/broadcom/brcm80211/brcmfmac/bcmsdh.c 	ret = mc->error ? mc->error : md->error;
mc               2649 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	u32 mc, mi;
mc               2664 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	mc = bcma_read32(core, D11REGOFFS(maccontrol));
mc               2666 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	if (mc == 0xffffffff) {
mc               2672 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	WARN_ON(mc & MCTL_PSM_JMP_0);
mc               2673 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	WARN_ON(!(mc & MCTL_PSM_RUN));
mc               2674 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	WARN_ON(!(mc & MCTL_EN_MAC));
mc               2701 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	mc = bcma_read32(core, D11REGOFFS(maccontrol));
mc               2702 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	if (mc == 0xffffffff) {
mc               2708 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	WARN_ON(mc & MCTL_PSM_JMP_0);
mc               2709 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	WARN_ON(!(mc & MCTL_PSM_RUN));
mc               2710 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	WARN_ON(mc & MCTL_EN_MAC);
mc               2717 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	u32 mc, mi;
mc               2729 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	mc = bcma_read32(core, D11REGOFFS(maccontrol));
mc               2730 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	WARN_ON(mc & MCTL_PSM_JMP_0);
mc               2731 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	WARN_ON(mc & MCTL_EN_MAC);
mc               2732 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	WARN_ON(!(mc & MCTL_PSM_RUN));
mc               2737 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	mc = bcma_read32(core, D11REGOFFS(maccontrol));
mc               2738 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	WARN_ON(mc & MCTL_PSM_JMP_0);
mc               2739 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	WARN_ON(!(mc & MCTL_EN_MAC));
mc               2740 drivers/net/wireless/broadcom/brcm80211/brcmsmac/main.c 	WARN_ON(!(mc & MCTL_PSM_RUN));
mc                736 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_cmn.c 	u32 mc;
mc                747 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_cmn.c 	mc = bcma_read32(pi->d11core, D11REGOFFS(maccontrol));
mc                748 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_cmn.c 	if (WARN(mc & MCTL_EN_MAC, "HW error MAC running on init"))
mc               21249 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_n.c 	u32 mc = 0;
mc               21288 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_n.c 		mc = bcma_read32(pi->d11core, D11REGOFFS(maccontrol));
mc               21289 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_n.c 		mc &= ~MCTL_GPOUT_SEL_MASK;
mc               21290 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_n.c 		bcma_write32(pi->d11core, D11REGOFFS(maccontrol), mc);
mc                132 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu.h 	u32 mc:1;
mc                184 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu.h 	u32 mc:1;
mc                274 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu.h 	u32 mc:1;
mc                320 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu.h 	u32 mc:1;
mc                751 drivers/net/wireless/realtek/rtlwifi/rtl8188ee/trx.h 	u32 mc:1;
mc                481 drivers/net/wireless/realtek/rtlwifi/rtl8192ce/trx.h 	u32 mc:1;
mc                 94 drivers/net/wireless/realtek/rtlwifi/rtl8192cu/mac.h 	u32 mc:1;
mc                673 drivers/net/wireless/realtek/rtlwifi/rtl8192de/trx.h 	u32 mc:1;
mc                687 drivers/net/wireless/realtek/rtlwifi/rtl8192ee/trx.h 	u32 mc:1;
mc                483 drivers/net/wireless/realtek/rtlwifi/rtl8723ae/trx.h 	u32 mc:1;
mc                596 drivers/net/wireless/realtek/rtlwifi/rtl8723be/trx.h 	u32 mc:1;
mc                602 drivers/net/wireless/realtek/rtlwifi/rtl8821ae/trx.h 	u32 mc:1;
mc                 38 drivers/pcmcia/at91_cf.c struct regmap *mc;
mc                172 drivers/pcmcia/at91_cf.c 	regmap_update_bits(mc, AT91_MC_SMC_CSR(cf->board->chipselect),
mc                234 drivers/pcmcia/at91_cf.c 	mc = syscon_regmap_lookup_by_compatible("atmel,at91rm9200-sdramc");
mc                236 drivers/pcmcia/at91_cf.c 	return PTR_ERR_OR_ZERO(mc);
mc               1248 drivers/s390/char/tape_3590.c 	if (sense->mc == 0)
mc               1250 drivers/s390/char/tape_3590.c 	if ((sense->mc > 0) && (sense->mc < TAPE_3590_MAX_MSG)) {
mc               1251 drivers/s390/char/tape_3590.c 		if (tape_3590_msg[sense->mc] != NULL)
mc               1254 drivers/s390/char/tape_3590.c 				tape_3590_msg[sense->mc]);
mc               1258 drivers/s390/char/tape_3590.c 				sense->mc);
mc               1261 drivers/s390/char/tape_3590.c 	if (sense->mc == 0xf0) {
mc               1264 drivers/s390/char/tape_3590.c 			"RC=%02x-%04x-%02x\n", sense->fmt.f70.sev, sense->mc,
mc               1271 drivers/s390/char/tape_3590.c 	if (sense->mc == 0xf1) {
mc               1276 drivers/s390/char/tape_3590.c 			sense->mc, sense->fmt.f71.emc, sense->fmt.f71.smc,
mc               1282 drivers/s390/char/tape_3590.c 	if (sense->mc == 0xf2) {
mc               1287 drivers/s390/char/tape_3590.c 			sense->mc, sense->fmt.f71.emc, sense->fmt.f71.smc,
mc               1293 drivers/s390/char/tape_3590.c 	if (sense->mc == 0xf3) {
mc               1298 drivers/s390/char/tape_3590.c 		"sense message code %x\n", sense->mc);
mc                 87 drivers/s390/char/tape_3590.h 	unsigned int mc:8;
mc                151 drivers/s390/char/tape_3590.h 	__u8  mc;
mc               1984 drivers/scsi/bfa/bfa_ioc.c 	int	mc;
mc               1987 drivers/scsi/bfa/bfa_ioc.c 	for (mc = 0; mc < BFI_MC_MAX; mc++) {
mc               1988 drivers/scsi/bfa/bfa_ioc.c 		mod->mbhdlr[mc].cbfn = NULL;
mc               1989 drivers/scsi/bfa/bfa_ioc.c 		mod->mbhdlr[mc].cbarg = ioc->bfa;
mc               2538 drivers/scsi/bfa/bfa_ioc.c 	int				mc;
mc               2540 drivers/scsi/bfa/bfa_ioc.c 	for (mc = 0; mc < BFI_MC_MAX; mc++)
mc               2541 drivers/scsi/bfa/bfa_ioc.c 		mod->mbhdlr[mc].cbfn = mcfuncs[mc];
mc               2548 drivers/scsi/bfa/bfa_ioc.c bfa_ioc_mbox_regisr(struct bfa_ioc_s *ioc, enum bfi_mclass mc,
mc               2553 drivers/scsi/bfa/bfa_ioc.c 	mod->mbhdlr[mc].cbfn	= cbfn;
mc               2554 drivers/scsi/bfa/bfa_ioc.c 	mod->mbhdlr[mc].cbarg	= cbarg;
mc               2601 drivers/scsi/bfa/bfa_ioc.c 	int				mc;
mc               2607 drivers/scsi/bfa/bfa_ioc.c 		mc = m.mh.msg_class;
mc               2608 drivers/scsi/bfa/bfa_ioc.c 		if (mc == BFI_MC_IOC) {
mc               2613 drivers/scsi/bfa/bfa_ioc.c 		if ((mc >= BFI_MC_MAX) || (mod->mbhdlr[mc].cbfn == NULL))
mc               2616 drivers/scsi/bfa/bfa_ioc.c 		mod->mbhdlr[mc].cbfn(mod->mbhdlr[mc].cbarg, &m);
mc                845 drivers/scsi/bfa/bfa_ioc.h void bfa_ioc_mbox_regisr(struct bfa_ioc_s *ioc, enum bfi_mclass mc,
mc               1891 drivers/scsi/megaraid.c 	megacmd_t	mc;
mc               1897 drivers/scsi/megaraid.c 	mc.cmd = MEGA_CLUSTER_CMD;
mc               1898 drivers/scsi/megaraid.c 	mc.opcode = MEGA_RESET_RESERVATIONS;
mc               1900 drivers/scsi/megaraid.c 	if( mega_internal_command(adapter, &mc, NULL) != 0 ) {
mc               2505 drivers/scsi/megaraid.c 	megacmd_t	mc;
mc               2527 drivers/scsi/megaraid.c 	memset(&mc, 0, sizeof(megacmd_t));
mc               2554 drivers/scsi/megaraid.c 	mc.xferaddr = (u32)disk_array_dma_handle;
mc               2557 drivers/scsi/megaraid.c 		mc.cmd = FC_NEW_CONFIG;
mc               2558 drivers/scsi/megaraid.c 		mc.opcode = OP_DCMD_READ_CONFIG;
mc               2560 drivers/scsi/megaraid.c 		if( mega_internal_command(adapter, &mc, NULL) ) {
mc               2567 drivers/scsi/megaraid.c 		mc.cmd = NEW_READ_CONFIG_8LD;
mc               2569 drivers/scsi/megaraid.c 		if( mega_internal_command(adapter, &mc, NULL) ) {
mc               2570 drivers/scsi/megaraid.c 			mc.cmd = READ_CONFIG_8LD;
mc               2571 drivers/scsi/megaraid.c 			if( mega_internal_command(adapter, &mc, NULL) ) {
mc               2988 drivers/scsi/megaraid.c 	megacmd_t	mc;
mc               3128 drivers/scsi/megaraid.c 				memset(&mc, 0, sizeof(megacmd_t));
mc               3130 drivers/scsi/megaraid.c 				mc.status = rval;
mc               3132 drivers/scsi/megaraid.c 				rval = mega_n_to_m((void __user *)arg, &mc);
mc               3231 drivers/scsi/megaraid.c 			memset(&mc, 0, sizeof(megacmd_t));
mc               3233 drivers/scsi/megaraid.c 			mc.cmd = MEGA_MBOXCMD_PASSTHRU;
mc               3234 drivers/scsi/megaraid.c 			mc.xferaddr = (u32)pthru_dma_hndl;
mc               3239 drivers/scsi/megaraid.c 			mega_internal_command(adapter, &mc, pthru);
mc               3241 drivers/scsi/megaraid.c 			rval = mega_n_to_m((void __user *)arg, &mc);
mc               3316 drivers/scsi/megaraid.c 			memcpy(&mc, MBOX(uioc), sizeof(megacmd_t));
mc               3318 drivers/scsi/megaraid.c 			mc.xferaddr = (u32)data_dma_hndl;
mc               3323 drivers/scsi/megaraid.c 			mega_internal_command(adapter, &mc, NULL);
mc               3325 drivers/scsi/megaraid.c 			rval = mega_n_to_m((void __user *)arg, &mc);
mc               3517 drivers/scsi/megaraid.c mega_n_to_m(void __user *arg, megacmd_t *mc)
mc               3535 drivers/scsi/megaraid.c 		if( put_user(mc->status, (u8 __user *)&MBOX_P(uiocp)->status) )
mc               3538 drivers/scsi/megaraid.c 		if( mc->cmd == MEGA_MBOXCMD_PASSTHRU ) {
mc               3545 drivers/scsi/megaraid.c 			if( put_user(mc->status, (u8 __user *)&upthru->scsistatus))
mc               3552 drivers/scsi/megaraid.c 		if( put_user(mc->status, (u8 __user *)&uioc_mimd->mbox[17]) )
mc               3555 drivers/scsi/megaraid.c 		if( mc->cmd == MEGA_MBOXCMD_PASSTHRU ) {
mc               3562 drivers/scsi/megaraid.c 			if( put_user(mc->status, (u8 __user *)&upthru->scsistatus) )
mc               3845 drivers/scsi/megaraid.c 	megacmd_t	mc;
mc               3848 drivers/scsi/megaraid.c 	memset( &mc, 0, sizeof(megacmd_t));
mc               3850 drivers/scsi/megaraid.c 	mc.cmd = FC_DEL_LOGDRV;
mc               3851 drivers/scsi/megaraid.c 	mc.opcode = OP_DEL_LOGDRV;
mc               3852 drivers/scsi/megaraid.c 	mc.subopcode = logdrv;
mc               3854 drivers/scsi/megaraid.c 	rval = mega_internal_command(adapter, &mc, NULL);
mc               3972 drivers/scsi/megaraid.c 	megacmd_t	mc;
mc               3974 drivers/scsi/megaraid.c 	memset(&mc, 0, sizeof(megacmd_t));
mc               3977 drivers/scsi/megaraid.c 		mc.cmd = FC_NEW_CONFIG;
mc               3978 drivers/scsi/megaraid.c 		mc.opcode = NC_SUBOP_ENQUIRY3;
mc               3979 drivers/scsi/megaraid.c 		mc.subopcode = ENQ3_GET_SOLICITED_FULL;
mc               3982 drivers/scsi/megaraid.c 		mc.cmd = MEGA_MBOXCMD_ADPEXTINQ;
mc               3985 drivers/scsi/megaraid.c 	mc.xferaddr = (u32)dma_handle;
mc               3987 drivers/scsi/megaraid.c 	if ( mega_internal_command(adapter, &mc, NULL) != 0 ) {
mc               4009 drivers/scsi/megaraid.c 	megacmd_t	mc;
mc               4050 drivers/scsi/megaraid.c 	memset(&mc, 0, sizeof(megacmd_t));
mc               4052 drivers/scsi/megaraid.c 	mc.cmd = MEGA_MBOXCMD_PASSTHRU;
mc               4053 drivers/scsi/megaraid.c 	mc.xferaddr = (u32)pthru_dma_handle;
mc               4055 drivers/scsi/megaraid.c 	rval = mega_internal_command(adapter, &mc, pthru);
mc               4079 drivers/scsi/megaraid.c mega_internal_command(adapter_t *adapter, megacmd_t *mc, mega_passthru *pthru)
mc               4098 drivers/scsi/megaraid.c 	memcpy(scb->raw_mbox, mc, sizeof(megacmd_t));
mc               4103 drivers/scsi/megaraid.c 	if (mc->cmd == MEGA_MBOXCMD_PASSTHRU)
mc               4119 drivers/scsi/megaraid.c 	mc->status = rval = adapter->int_status;
mc               4127 drivers/scsi/megaraid.c 			mc->cmd, mc->opcode, mc->subopcode, rval);
mc                600 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc                601 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc                691 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc                692 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc                825 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t	mc;
mc                826 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t	*mcp = &mc;
mc                881 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t	mc;
mc                882 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t	*mcp = &mc;
mc                934 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t	mc;
mc                935 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t	*mcp = &mc;
mc                990 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t	mc;
mc                991 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t	*mcp = &mc;
mc               1042 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t	mc;
mc               1043 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t	*mcp = &mc;
mc               1178 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               1179 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               1226 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               1227 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               1286 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               1287 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               1346 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               1347 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               1405 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t	mc;
mc               1406 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t	*mcp = &mc;
mc               1467 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t	mc;
mc               1468 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t	*mcp = &mc;
mc               1523 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t  mc;
mc               1524 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t  *mcp = &mc;
mc               1571 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t  mc;
mc               1572 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t  *mcp = &mc;
mc               1637 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               1638 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               1733 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               1734 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               1788 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               1789 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               1878 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               1879 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               2068 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               2069 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               2134 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               2135 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               2198 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               2199 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               2250 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               2251 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               2321 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               2322 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               2499 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               2500 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               2580 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               2581 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               2714 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               2715 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               2767 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               2768 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               2812 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               2813 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               2873 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               2874 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               2934 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               2935 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               3002 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               3003 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               3061 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               3062 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               3068 drivers/scsi/qla2xxx/qla_mbx.c 	memset(&mc, 0, sizeof(mc));
mc               3069 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[0] = MBC_GET_LINK_PRIV_STATS;
mc               3070 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[2] = MSW(stats_dma);
mc               3071 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[3] = LSW(stats_dma);
mc               3072 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[6] = MSW(MSD(stats_dma));
mc               3073 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[7] = LSW(MSD(stats_dma));
mc               3074 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[8] = sizeof(struct link_statistics) / 4;
mc               3075 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[9] = cpu_to_le16(vha->vp_idx);
mc               3076 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[10] = cpu_to_le16(options);
mc               3078 drivers/scsi/qla2xxx/qla_mbx.c 	rval = qla24xx_send_mb_cmd(vha, &mc);
mc               3313 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               3314 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               3344 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               3345 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               3383 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               3384 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               3422 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               3423 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               3458 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               3459 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               3504 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               3505 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               3538 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               3539 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               3572 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               3573 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               3613 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               3614 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               3649 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               3650 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               3701 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               3702 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               3750 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               3751 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               3793 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               3794 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               4158 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               4159 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               4188 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               4189 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               4339 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               4340 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               4412 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               4413 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               4483 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               4484 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               4512 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               4513 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               4547 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               4548 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               4582 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               4583 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               4620 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               4621 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               4656 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               4657 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               4685 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               4686 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               4732 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               4733 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               4792 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               4793 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               4833 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               4834 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               4867 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               4868 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               4918 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               4919 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               4965 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               4966 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5006 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5007 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5044 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5045 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5078 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5079 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5140 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5141 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5202 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5203 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5229 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5230 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5327 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5328 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5386 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5387 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5422 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5423 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5457 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5458 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5488 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5489 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5580 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5581 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5615 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5616 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5649 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5650 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5693 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5694 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5742 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5743 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5798 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5799 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5842 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5843 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5886 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5887 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5924 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5925 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5961 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5962 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               5997 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               5998 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               6048 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               6049 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               6081 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               6082 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               6135 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               6136 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               6181 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               6182 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               6313 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               6325 drivers/scsi/qla2xxx/qla_mbx.c 	memset(&mc, 0, sizeof(mc));
mc               6326 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[0] = MBC_GET_PORT_DATABASE;
mc               6327 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[1] = cpu_to_le16(fcport->loop_id);
mc               6328 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[2] = MSW(pd_dma);
mc               6329 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[3] = LSW(pd_dma);
mc               6330 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[6] = MSW(MSD(pd_dma));
mc               6331 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[7] = LSW(MSD(pd_dma));
mc               6332 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[9] = cpu_to_le16(vha->vp_idx);
mc               6333 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[10] = cpu_to_le16((uint16_t)opt);
mc               6335 drivers/scsi/qla2xxx/qla_mbx.c 	rval = qla24xx_send_mb_cmd(vha, &mc);
mc               6432 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               6437 drivers/scsi/qla2xxx/qla_mbx.c 	memset(&mc, 0, sizeof(mc));
mc               6438 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[0] = MBC_GET_ID_LIST;
mc               6439 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[2] = MSW(id_list_dma);
mc               6440 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[3] = LSW(id_list_dma);
mc               6441 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[6] = MSW(MSD(id_list_dma));
mc               6442 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[7] = LSW(MSD(id_list_dma));
mc               6443 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[8] = 0;
mc               6444 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[9] = cpu_to_le16(vha->vp_idx);
mc               6446 drivers/scsi/qla2xxx/qla_mbx.c 	rval = qla24xx_send_mb_cmd(vha, &mc);
mc               6451 drivers/scsi/qla2xxx/qla_mbx.c 		*entries = mc.mb[1];
mc               6462 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t	mc;
mc               6463 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t	*mcp = &mc;
mc               6488 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t	mc;
mc               6489 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t	*mcp = &mc;
mc               6504 drivers/scsi/qla2xxx/qla_mbx.c 		*value = mc.mb[2];
mc               6568 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               6573 drivers/scsi/qla2xxx/qla_mbx.c 	memset(&mc, 0, sizeof(mc));
mc               6574 drivers/scsi/qla2xxx/qla_mbx.c 	mc.mb[0] = MBC_GET_RESOURCE_COUNTS;
mc               6576 drivers/scsi/qla2xxx/qla_mbx.c 	rval = qla24xx_send_mb_cmd(vha, &mc);
mc               6582 drivers/scsi/qla2xxx/qla_mbx.c 			memcpy(out_mb, mc.mb, out_mb_sz);
mc               6584 drivers/scsi/qla2xxx/qla_mbx.c 			memcpy(out_mb, mc.mb, SIZEOF_IOCB_MB_REG);
mc               6598 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               6599 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               6631 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               6632 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc               6663 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t mc;
mc               6664 drivers/scsi/qla2xxx/qla_mbx.c 	mbx_cmd_t *mcp = &mc;
mc                303 drivers/scsi/qla2xxx/qla_mr.c 	struct mbx_cmd_32 mc;
mc                304 drivers/scsi/qla2xxx/qla_mr.c 	struct mbx_cmd_32 *mcp = &mc;
mc                349 drivers/scsi/qla2xxx/qla_mr.c 	struct mbx_cmd_32 mc;
mc                350 drivers/scsi/qla2xxx/qla_mr.c 	struct mbx_cmd_32 *mcp = &mc;
mc                396 drivers/scsi/qla2xxx/qla_mr.c 	struct mbx_cmd_32 mc;
mc                397 drivers/scsi/qla2xxx/qla_mr.c 	struct mbx_cmd_32 *mcp = &mc;
mc                433 drivers/scsi/qla2xxx/qla_mr.c 	struct mbx_cmd_32 mc;
mc                434 drivers/scsi/qla2xxx/qla_mr.c 	struct mbx_cmd_32 *mcp = &mc;
mc                122 drivers/slimbus/messaging.c 		 txn->mc >= SLIM_MSG_MC_BEGIN_RECONFIGURATION &&
mc                123 drivers/slimbus/messaging.c 		 txn->mc <= SLIM_MSG_MC_RECONFIGURE_NOW))
mc                135 drivers/slimbus/messaging.c 	need_tid = slim_tid_txn(txn->mt, txn->mc);
mc                163 drivers/slimbus/messaging.c 			txn->mt, txn->mc, txn->la, ret);
mc                179 drivers/slimbus/messaging.c 			       struct slim_val_inf *msg, u8 mc)
mc                184 drivers/slimbus/messaging.c 	switch (mc) {
mc                206 drivers/slimbus/messaging.c 			msg->start_offset, mc);
mc                233 drivers/slimbus/messaging.c 		  u8 mc)
mc                235 drivers/slimbus/messaging.c 	DEFINE_SLIM_LDEST_TXN(txn_stack, mc, 6, sbdev->laddr, msg);
mc                244 drivers/slimbus/messaging.c 	ret = slim_val_inf_sanity(ctrl, msg, mc);
mc                251 drivers/slimbus/messaging.c 		msg->start_offset, msg->num_bytes, mc, sl);
mc                255 drivers/slimbus/messaging.c 	switch (mc) {
mc                265 drivers/slimbus/messaging.c 	if (slim_tid_txn(txn->mt, txn->mc))
mc                 64 drivers/slimbus/qcom-ctrl.c #define SLIM_MSG_ASM_FIRST_WORD(l, mt, mc, dt, ad) \
mc                 65 drivers/slimbus/qcom-ctrl.c 		((l) | ((mt) << 5) | ((mc) << 8) | ((dt) << 15) | ((ad) << 16))
mc                212 drivers/slimbus/qcom-ctrl.c 	u8 mc, mt, len;
mc                217 drivers/slimbus/qcom-ctrl.c 	mc = SLIM_HEADER_GET_MC(pkt[0]>>8);
mc                223 drivers/slimbus/qcom-ctrl.c 	if (mt == SLIM_MSG_MT_CORE && mc == SLIM_MSG_MC_REPORT_PRESENT) {
mc                239 drivers/slimbus/qcom-ctrl.c 	switch (mc) {
mc                251 drivers/slimbus/qcom-ctrl.c 			mc, mt);
mc                357 drivers/slimbus/qcom-ctrl.c 						txn->mc, 0, la);
mc                361 drivers/slimbus/qcom-ctrl.c 						txn->mc, 1, la);
mc                365 drivers/slimbus/qcom-ctrl.c 	if (slim_tid_txn(txn->mt, txn->mc))
mc                368 drivers/slimbus/qcom-ctrl.c 	if (slim_ec_txn(txn->mt, txn->mc)) {
mc                380 drivers/slimbus/qcom-ctrl.c 		dev_err(ctrl->dev, "TX timed out:MC:0x%x,mt:0x%x", txn->mc,
mc                442 drivers/slimbus/qcom-ctrl.c 	u8 mc, mt;
mc                449 drivers/slimbus/qcom-ctrl.c 		mc = SLIM_HEADER_GET_MC(buf[1]);
mc                451 drivers/slimbus/qcom-ctrl.c 			mc == SLIM_MSG_MC_REPORT_PRESENT) {
mc                467 drivers/slimbus/qcom-ctrl.c 				mc, mt);
mc                 91 drivers/slimbus/qcom-ngd-ctrl.c #define SLIM_MSG_ASM_FIRST_WORD(l, mt, mc, dt, ad) \
mc                 92 drivers/slimbus/qcom-ngd-ctrl.c 		((l) | ((mt) << 5) | ((mc) << 8) | ((dt) << 15) | ((ad) << 16))
mc                594 drivers/slimbus/qcom-ngd-ctrl.c 	u8 mc, mt, len;
mc                598 drivers/slimbus/qcom-ngd-ctrl.c 	mc = SLIM_HEADER_GET_MC(buf[1]);
mc                600 drivers/slimbus/qcom-ngd-ctrl.c 	if (mc == SLIM_USR_MC_MASTER_CAPABILITY &&
mc                604 drivers/slimbus/qcom-ngd-ctrl.c 	if (mc == SLIM_MSG_MC_REPLY_INFORMATION ||
mc                605 drivers/slimbus/qcom-ngd-ctrl.c 	    mc == SLIM_MSG_MC_REPLY_VALUE || (mc == SLIM_USR_MC_ADDR_REPLY &&
mc                607 drivers/slimbus/qcom-ngd-ctrl.c 		(mc == SLIM_USR_MC_GENERIC_ACK &&
mc                781 drivers/slimbus/qcom-ngd-ctrl.c 		(txn->mc >= SLIM_MSG_MC_BEGIN_RECONFIGURATION &&
mc                782 drivers/slimbus/qcom-ngd-ctrl.c 		 txn->mc <= SLIM_MSG_MC_RECONFIGURE_NOW))
mc                801 drivers/slimbus/qcom-ngd-ctrl.c 		(txn->mc == SLIM_MSG_MC_CONNECT_SOURCE ||
mc                802 drivers/slimbus/qcom-ngd-ctrl.c 		txn->mc == SLIM_MSG_MC_CONNECT_SINK ||
mc                803 drivers/slimbus/qcom-ngd-ctrl.c 		txn->mc == SLIM_MSG_MC_DISCONNECT_PORT)) {
mc                805 drivers/slimbus/qcom-ngd-ctrl.c 		switch (txn->mc) {
mc                807 drivers/slimbus/qcom-ngd-ctrl.c 			txn->mc = SLIM_USR_MC_CONNECT_SRC;
mc                810 drivers/slimbus/qcom-ngd-ctrl.c 			txn->mc = SLIM_USR_MC_CONNECT_SINK;
mc                813 drivers/slimbus/qcom-ngd-ctrl.c 			txn->mc = SLIM_USR_MC_DISCONNECT_PORT;
mc                824 drivers/slimbus/qcom-ngd-ctrl.c 		if (txn->mc != SLIM_USR_MC_DISCONNECT_PORT)
mc                847 drivers/slimbus/qcom-ngd-ctrl.c 		*pbuf = SLIM_MSG_ASM_FIRST_WORD(txn->rl, txn->mt, txn->mc, 0,
mc                851 drivers/slimbus/qcom-ngd-ctrl.c 		*pbuf = SLIM_MSG_ASM_FIRST_WORD(txn->rl, txn->mt, txn->mc, 1,
mc                856 drivers/slimbus/qcom-ngd-ctrl.c 	if (slim_tid_txn(txn->mt, txn->mc))
mc                859 drivers/slimbus/qcom-ngd-ctrl.c 	if (slim_ec_txn(txn->mt, txn->mc)) {
mc                873 drivers/slimbus/qcom-ngd-ctrl.c 		dev_err(sctrl->dev, "TX timed out:MC:0x%x,mt:0x%x", txn->mc,
mc                882 drivers/slimbus/qcom-ngd-ctrl.c 				txn->mc, txn->mt);
mc                906 drivers/slimbus/qcom-ngd-ctrl.c 		dev_err(ctrl->dev, "TX timed out:MC:0x%x,mt:0x%x", txn->mc,
mc                968 drivers/slimbus/qcom-ngd-ctrl.c 	txn.mc = SLIM_USR_MC_DEF_ACT_CHAN;
mc                973 drivers/slimbus/qcom-ngd-ctrl.c 		dev_err(&sdev->dev, "TX timed out:MC:0x%x,mt:0x%x", txn.mc,
mc                978 drivers/slimbus/qcom-ngd-ctrl.c 	txn.mc = SLIM_USR_MC_RECONFIG_NOW;
mc                993 drivers/slimbus/qcom-ngd-ctrl.c 		dev_err(&sdev->dev, "TX timed out:MC:0x%x,mt:0x%x", txn.mc,
mc               1015 drivers/slimbus/qcom-ngd-ctrl.c 	txn.mc = SLIM_USR_MC_ADDR_QUERY;
mc               1161 drivers/slimbus/qcom-ngd-ctrl.c 	txn.mc = SLIM_USR_MC_REPORT_SATELLITE;
mc                 96 drivers/slimbus/sched.c 	txn.mc = SLIM_MSG_MC_NEXT_PAUSE_CLOCK;
mc                104 drivers/slimbus/sched.c 	txn.mc = SLIM_MSG_MC_RECONFIGURE_NOW;
mc                130 drivers/slimbus/slimbus.h 	u8			mc;
mc                140 drivers/slimbus/slimbus.h #define DEFINE_SLIM_LDEST_TXN(name, mc, rl, la, msg) \
mc                141 drivers/slimbus/slimbus.h 	struct slim_msg_txn name = { rl, 0, mc, SLIM_MSG_DEST_LOGICALADDR, 0,\
mc                144 drivers/slimbus/slimbus.h #define DEFINE_SLIM_BCAST_TXN(name, mc, rl, la, msg) \
mc                145 drivers/slimbus/slimbus.h 	struct slim_msg_txn name = { rl, 0, mc, SLIM_MSG_DEST_BROADCAST, 0,\
mc                148 drivers/slimbus/slimbus.h #define DEFINE_SLIM_EDEST_TXN(name, mc, rl, la, msg) \
mc                149 drivers/slimbus/slimbus.h 	struct slim_msg_txn name = { rl, 0, mc, SLIM_MSG_DEST_ENUMADDR, 0,\
mc                436 drivers/slimbus/slimbus.h static inline bool slim_tid_txn(u8 mt, u8 mc)
mc                439 drivers/slimbus/slimbus.h 		(mc == SLIM_MSG_MC_REQUEST_INFORMATION ||
mc                440 drivers/slimbus/slimbus.h 		 mc == SLIM_MSG_MC_REQUEST_CLEAR_INFORMATION ||
mc                441 drivers/slimbus/slimbus.h 		 mc == SLIM_MSG_MC_REQUEST_VALUE ||
mc                442 drivers/slimbus/slimbus.h 		 mc == SLIM_MSG_MC_REQUEST_CHANGE_VALUE));
mc                445 drivers/slimbus/slimbus.h static inline bool slim_ec_txn(u8 mt, u8 mc)
mc                448 drivers/slimbus/slimbus.h 		((mc >= SLIM_MSG_MC_REQUEST_INFORMATION &&
mc                449 drivers/slimbus/slimbus.h 		  mc <= SLIM_MSG_MC_REPORT_INFORMATION) ||
mc                450 drivers/slimbus/slimbus.h 		 (mc >= SLIM_MSG_MC_REQUEST_VALUE &&
mc                451 drivers/slimbus/slimbus.h 		  mc <= SLIM_MSG_MC_CHANGE_VALUE)));
mc                129 drivers/slimbus/stream.c 	u8 mc = SLIM_MSG_MC_CONNECT_SOURCE;
mc                130 drivers/slimbus/stream.c 	DEFINE_SLIM_LDEST_TXN(txn, mc, 6, stream->dev->laddr, &msg);
mc                133 drivers/slimbus/stream.c 		txn.mc = SLIM_MSG_MC_CONNECT_SINK;
mc                149 drivers/slimbus/stream.c 	u8 mc = SLIM_MSG_MC_DISCONNECT_PORT;
mc                150 drivers/slimbus/stream.c 	DEFINE_SLIM_LDEST_TXN(txn, mc, 5, stream->dev->laddr, &msg);
mc                165 drivers/slimbus/stream.c 	u8 mc = SLIM_MSG_MC_NEXT_DEACTIVATE_CHANNEL;
mc                166 drivers/slimbus/stream.c 	DEFINE_SLIM_LDEST_TXN(txn, mc, 5, stream->dev->laddr, &msg);
mc                174 drivers/slimbus/stream.c 	txn.mc = SLIM_MSG_MC_NEXT_REMOVE_CHANNEL;
mc                269 drivers/slimbus/stream.c 	u8 mc = SLIM_MSG_MC_NEXT_DEFINE_CONTENT;
mc                270 drivers/slimbus/stream.c 	DEFINE_SLIM_LDEST_TXN(txn, mc, 8, stream->dev->laddr, &msg);
mc                304 drivers/slimbus/stream.c 	u8 mc = SLIM_MSG_MC_NEXT_DEFINE_CHANNEL;
mc                305 drivers/slimbus/stream.c 	DEFINE_SLIM_LDEST_TXN(txn, mc, 8, stream->dev->laddr, &msg);
mc                328 drivers/slimbus/stream.c 	u8 mc = SLIM_MSG_MC_NEXT_ACTIVATE_CHANNEL;
mc                329 drivers/slimbus/stream.c 	DEFINE_SLIM_LDEST_TXN(txn, mc, 5, stream->dev->laddr, &msg);
mc                386 drivers/slimbus/stream.c 	txn.mc = SLIM_MSG_MC_RECONFIGURE_NOW;
mc                420 drivers/slimbus/stream.c 	txn.mc = SLIM_MSG_MC_RECONFIGURE_NOW;
mc                165 drivers/soc/fsl/dpio/qbman-portal.c 	p->mc.valid_bit = QB_VALID_BIT;
mc                331 drivers/soc/fsl/dpio/qbman-portal.c 		*v = cmd_verb | p->mc.valid_bit;
mc                333 drivers/soc/fsl/dpio/qbman-portal.c 		*v = cmd_verb | p->mc.valid_bit;
mc                348 drivers/soc/fsl/dpio/qbman-portal.c 		ret = qbman_get_cmd(p, QBMAN_CENA_SWP_RR(p->mc.valid_bit));
mc                355 drivers/soc/fsl/dpio/qbman-portal.c 		p->mc.valid_bit ^= QB_VALID_BIT;
mc                111 drivers/soc/fsl/dpio/qbman-portal.h 	} mc;
mc                187 drivers/soc/fsl/qbman/bman.c 	struct bm_mc mc;
mc                430 drivers/soc/fsl/qbman/bman.c 	struct bm_mc *mc = &portal->mc;
mc                432 drivers/soc/fsl/qbman/bman.c 	mc->cr = portal->addr.ce + BM_CL_CR;
mc                433 drivers/soc/fsl/qbman/bman.c 	mc->rr = portal->addr.ce + BM_CL_RR0;
mc                434 drivers/soc/fsl/qbman/bman.c 	mc->rridx = (mc->cr->_ncw_verb & BM_MCC_VERB_VBIT) ?
mc                436 drivers/soc/fsl/qbman/bman.c 	mc->vbit = mc->rridx ? BM_MCC_VERB_VBIT : 0;
mc                438 drivers/soc/fsl/qbman/bman.c 	mc->state = mc_idle;
mc                446 drivers/soc/fsl/qbman/bman.c 	struct bm_mc *mc = &portal->mc;
mc                448 drivers/soc/fsl/qbman/bman.c 	DPAA_ASSERT(mc->state == mc_idle);
mc                449 drivers/soc/fsl/qbman/bman.c 	if (mc->state != mc_idle)
mc                456 drivers/soc/fsl/qbman/bman.c 	struct bm_mc *mc = &portal->mc;
mc                458 drivers/soc/fsl/qbman/bman.c 	DPAA_ASSERT(mc->state == mc_idle);
mc                460 drivers/soc/fsl/qbman/bman.c 	mc->state = mc_user;
mc                462 drivers/soc/fsl/qbman/bman.c 	dpaa_zero(mc->cr);
mc                463 drivers/soc/fsl/qbman/bman.c 	return mc->cr;
mc                468 drivers/soc/fsl/qbman/bman.c 	struct bm_mc *mc = &portal->mc;
mc                469 drivers/soc/fsl/qbman/bman.c 	union bm_mc_result *rr = mc->rr + mc->rridx;
mc                471 drivers/soc/fsl/qbman/bman.c 	DPAA_ASSERT(mc->state == mc_user);
mc                473 drivers/soc/fsl/qbman/bman.c 	mc->cr->_ncw_verb = myverb | mc->vbit;
mc                474 drivers/soc/fsl/qbman/bman.c 	dpaa_flush(mc->cr);
mc                477 drivers/soc/fsl/qbman/bman.c 	mc->state = mc_hw;
mc                483 drivers/soc/fsl/qbman/bman.c 	struct bm_mc *mc = &portal->mc;
mc                484 drivers/soc/fsl/qbman/bman.c 	union bm_mc_result *rr = mc->rr + mc->rridx;
mc                486 drivers/soc/fsl/qbman/bman.c 	DPAA_ASSERT(mc->state == mc_hw);
mc                496 drivers/soc/fsl/qbman/bman.c 	mc->rridx ^= 1;
mc                497 drivers/soc/fsl/qbman/bman.c 	mc->vbit ^= BM_MCC_VERB_VBIT;
mc                499 drivers/soc/fsl/qbman/bman.c 	mc->state = mc_idle;
mc                359 drivers/soc/fsl/qbman/qman.c 	struct qm_mc mc;
mc                862 drivers/soc/fsl/qbman/qman.c 	struct qm_mc *mc = &portal->mc;
mc                864 drivers/soc/fsl/qbman/qman.c 	mc->cr = portal->addr.ce + QM_CL_CR;
mc                865 drivers/soc/fsl/qbman/qman.c 	mc->rr = portal->addr.ce + QM_CL_RR0;
mc                873 drivers/soc/fsl/qbman/qman.c 	rr0 = mc->rr->verb;
mc                874 drivers/soc/fsl/qbman/qman.c 	rr1 = (mc->rr+1)->verb;
mc                876 drivers/soc/fsl/qbman/qman.c 		mc->rridx = 1;
mc                878 drivers/soc/fsl/qbman/qman.c 		mc->rridx = 0;
mc                879 drivers/soc/fsl/qbman/qman.c 	mc->vbit = mc->rridx ? QM_MCC_VERB_VBIT : 0;
mc                881 drivers/soc/fsl/qbman/qman.c 	mc->state = qman_mc_idle;
mc                889 drivers/soc/fsl/qbman/qman.c 	struct qm_mc *mc = &portal->mc;
mc                891 drivers/soc/fsl/qbman/qman.c 	DPAA_ASSERT(mc->state == qman_mc_idle);
mc                892 drivers/soc/fsl/qbman/qman.c 	if (mc->state != qman_mc_idle)
mc                899 drivers/soc/fsl/qbman/qman.c 	struct qm_mc *mc = &portal->mc;
mc                901 drivers/soc/fsl/qbman/qman.c 	DPAA_ASSERT(mc->state == qman_mc_idle);
mc                903 drivers/soc/fsl/qbman/qman.c 	mc->state = qman_mc_user;
mc                905 drivers/soc/fsl/qbman/qman.c 	dpaa_zero(mc->cr);
mc                906 drivers/soc/fsl/qbman/qman.c 	return mc->cr;
mc                911 drivers/soc/fsl/qbman/qman.c 	struct qm_mc *mc = &portal->mc;
mc                912 drivers/soc/fsl/qbman/qman.c 	union qm_mc_result *rr = mc->rr + mc->rridx;
mc                914 drivers/soc/fsl/qbman/qman.c 	DPAA_ASSERT(mc->state == qman_mc_user);
mc                916 drivers/soc/fsl/qbman/qman.c 	mc->cr->_ncw_verb = myverb | mc->vbit;
mc                917 drivers/soc/fsl/qbman/qman.c 	dpaa_flush(mc->cr);
mc                920 drivers/soc/fsl/qbman/qman.c 	mc->state = qman_mc_hw;
mc                926 drivers/soc/fsl/qbman/qman.c 	struct qm_mc *mc = &portal->mc;
mc                927 drivers/soc/fsl/qbman/qman.c 	union qm_mc_result *rr = mc->rr + mc->rridx;
mc                929 drivers/soc/fsl/qbman/qman.c 	DPAA_ASSERT(mc->state == qman_mc_hw);
mc                939 drivers/soc/fsl/qbman/qman.c 	mc->rridx ^= 1;
mc                940 drivers/soc/fsl/qbman/qman.c 	mc->vbit ^= QM_MCC_VERB_VBIT;
mc                942 drivers/soc/fsl/qbman/qman.c 	mc->state = qman_mc_idle;
mc                 37 drivers/soc/qcom/llcc-sdm845.c #define SCT_ENTRY(uid, sid, mc, p, fs, bway, rway, cmod, ptw, dca, rp, a) \
mc                 41 drivers/soc/qcom/llcc-sdm845.c 		.max_cap = mc,			\
mc                844 drivers/ssb/main.c 	u32 n1, n2, clock, m1, m2, m3, mc;
mc                887 drivers/ssb/main.c 	mc = ((m & SSB_CHIPCO_CLK_MC) >> SSB_CHIPCO_CLK_MC_SHIFT);
mc                902 drivers/ssb/main.c 		switch (mc) {
mc                923 drivers/ssb/main.c 		if (!(mc & SSB_CHIPCO_CLK_T2MC_M1BYP))
mc                925 drivers/ssb/main.c 		if (!(mc & SSB_CHIPCO_CLK_T2MC_M2BYP))
mc                927 drivers/ssb/main.c 		if (!(mc & SSB_CHIPCO_CLK_T2MC_M3BYP))
mc                915 drivers/staging/fsl-dpaa2/ethsw/ethsw.c 	struct netdev_hw_addr_list *list = (is_uc) ? &netdev->uc : &netdev->mc;
mc                263 drivers/staging/isdn/gigaset/interface.c 	unsigned mc;
mc                275 drivers/staging/isdn/gigaset/interface.c 		mc = (cs->control_state | set) & ~clear & (TIOCM_RTS | TIOCM_DTR);
mc                276 drivers/staging/isdn/gigaset/interface.c 		retval = cs->ops->set_modem_ctrl(cs, cs->control_state, mc);
mc                277 drivers/staging/isdn/gigaset/interface.c 		cs->control_state = mc;
mc                 37 drivers/staging/most/core.c } mc;
mc                461 drivers/staging/most/core.c 	list_for_each_entry(comp, &mc.comp_list, list) {
mc                507 drivers/staging/most/core.c 	bus_for_each_dev(&mc.bus, NULL, &d, print_links);
mc                516 drivers/staging/most/core.c 	list_for_each_entry(comp, &mc.comp_list, list) {
mc                576 drivers/staging/most/core.c 	dev = bus_find_device_by_name(&mc.bus, NULL, mdev);
mc               1310 drivers/staging/most/core.c 	list_add_tail(&comp->list, &mc.comp_list);
mc               1345 drivers/staging/most/core.c 	bus_for_each_dev(&mc.bus, NULL, comp, disconnect_channels);
mc               1397 drivers/staging/most/core.c 	iface->dev.bus = &mc.bus;
mc               1398 drivers/staging/most/core.c 	iface->dev.parent = &mc.dev;
mc               1561 drivers/staging/most/core.c 	INIT_LIST_HEAD(&mc.comp_list);
mc               1564 drivers/staging/most/core.c 	mc.bus.name = "most",
mc               1565 drivers/staging/most/core.c 	mc.bus.match = most_match,
mc               1566 drivers/staging/most/core.c 	mc.drv.name = "most_core",
mc               1567 drivers/staging/most/core.c 	mc.drv.bus = &mc.bus,
mc               1568 drivers/staging/most/core.c 	mc.drv.groups = mc_attr_groups;
mc               1570 drivers/staging/most/core.c 	err = bus_register(&mc.bus);
mc               1575 drivers/staging/most/core.c 	err = driver_register(&mc.drv);
mc               1580 drivers/staging/most/core.c 	mc.dev.init_name = "most_bus";
mc               1581 drivers/staging/most/core.c 	mc.dev.release = release_most_sub;
mc               1582 drivers/staging/most/core.c 	if (device_register(&mc.dev)) {
mc               1590 drivers/staging/most/core.c 	driver_unregister(&mc.drv);
mc               1592 drivers/staging/most/core.c 	bus_unregister(&mc.bus);
mc               1599 drivers/staging/most/core.c 	device_unregister(&mc.dev);
mc               1600 drivers/staging/most/core.c 	driver_unregister(&mc.drv);
mc               1601 drivers/staging/most/core.c 	bus_unregister(&mc.bus);
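The most/core.c excerpt above registers the bus, then the driver, then the device, and both its error path and its exit path unwind in the opposite order. A small standalone sketch of that register-in-order / unwind-in-reverse idiom with goto labels; the step functions are stand-ins, not the MOST core API:

#include <stdio.h>

static int simulate_device_failure;

/* stand-in setup/teardown steps (hypothetical, not the MOST core API) */
static int  bus_setup(void)       { puts("  bus up");      return 0; }
static void bus_teardown(void)    { puts("  bus down");              }
static int  driver_setup(void)    { puts("  driver up");   return 0; }
static void driver_teardown(void) { puts("  driver down");           }
static int  device_setup(void)
{
	if (simulate_device_failure) {
		puts("  device failed");
		return -1;
	}
	puts("  device up");
	return 0;
}
static void device_teardown(void) { puts("  device down"); }

static int subsystem_init(void)
{
	int err;

	err = bus_setup();
	if (err)
		goto out;
	err = driver_setup();
	if (err)
		goto err_bus;
	err = device_setup();
	if (err)
		goto err_driver;
	return 0;

err_driver:
	driver_teardown();	/* undo in reverse order of setup */
err_bus:
	bus_teardown();
out:
	return err;
}

static void subsystem_exit(void)
{
	device_teardown();
	driver_teardown();
	bus_teardown();
}

int main(void)
{
	puts("init with device failure:");
	simulate_device_failure = 1;
	printf("  -> %d\n", subsystem_init());

	puts("init, then clean exit:");
	simulate_device_failure = 0;
	if (!subsystem_init())
		subsystem_exit();
	return 0;
}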
mc                 46 drivers/staging/rtl8723bs/include/rtl8723b_recv.h 	u32 mc:1;
mc                676 drivers/staging/wilc1000/wilc_netdev.c 	    dev->mc.count > WILC_MULTICAST_TABLE_SIZE) {
mc                681 drivers/staging/wilc1000/wilc_netdev.c 	if (dev->mc.count == 0) {
mc                686 drivers/staging/wilc1000/wilc_netdev.c 	mc_list = kmalloc_array(dev->mc.count, ETH_ALEN, GFP_ATOMIC);
mc                699 drivers/staging/wilc1000/wilc_netdev.c 	if (wilc_setup_multicast_filter(vif, 1, dev->mc.count, mc_list))
mc                121 drivers/thunderbolt/icm.c 	u8 mc;
mc                165 drivers/usb/dwc2/core.h 	unsigned char           mc;
mc                571 drivers/usb/dwc2/gadget.c 	max_transfer = hs_ep->ep.maxpacket * hs_ep->mc;
mc               1391 drivers/usb/dwc2/gadget.c 	    req->length > (hs_ep->mc * hs_ep->ep.maxpacket)) {
mc               2555 drivers/usb/dwc2/gadget.c 					unsigned int mc, unsigned int dir_in)
mc               2572 drivers/usb/dwc2/gadget.c 		hs_ep->mc = 1;
mc               2576 drivers/usb/dwc2/gadget.c 		hs_ep->mc = mc;
mc               2577 drivers/usb/dwc2/gadget.c 		if (mc > 3)
mc               3931 drivers/usb/dwc2/gadget.c 	u32 mc;
mc               3958 drivers/usb/dwc2/gadget.c 	mc = usb_endpoint_maxp_mult(desc);
mc               3970 drivers/usb/dwc2/gadget.c 	    !dir_in && mc > 1) {
mc               4012 drivers/usb/dwc2/gadget.c 	dwc2_hsotg_set_ep_maxpacket(hsotg, hs_ep->index, mps, mc, dir_in);
mc               4069 drivers/usb/dwc2/gadget.c 		size = hs_ep->ep.maxpacket * hs_ep->mc;
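In the dwc2/gadget.c entries above, mc is the per-microframe packet multiplier read from usb_endpoint_maxp_mult(), and the largest transfer the endpoint accepts per (micro)frame works out to maxpacket * mc, with the multiplier not exceeding 3 for USB 2.0 high-bandwidth endpoints. A small arithmetic illustration under those assumptions (function names are made up):

#include <stdio.h>

/* largest payload per (micro)frame for a given wMaxPacketSize and multiplier */
static unsigned int max_transfer(unsigned int maxpacket, unsigned int mc)
{
	if (mc < 1)
		mc = 1;
	if (mc > 3)	/* USB 2.0 allows at most 3 transactions per microframe */
		mc = 3;
	return maxpacket * mc;
}

int main(void)
{
	/* e.g. a high-bandwidth isochronous endpoint: 1024-byte packets, mult 3 */
	printf("%u\n", max_transfer(1024, 3));	/* 3072 */
	printf("%u\n", max_transfer(512, 1));	/* 512  */
	return 0;
}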
mc               1199 drivers/video/fbdev/aty/radeon_pm.c 	u32 mc;
mc               1205 drivers/video/fbdev/aty/radeon_pm.c 	mc = INREG(MC_CNTL);
mc               1207 drivers/video/fbdev/aty/radeon_pm.c 	switch (mc & 0x3) {
mc               1209 drivers/video/fbdev/aty/radeon_pm.c 		if (mc & 0x4)
mc               1220 drivers/video/fbdev/aty/radeon_pm.c 	switch (mc & 0x3) {
mc               1222 drivers/video/fbdev/aty/radeon_pm.c 		if (!(mc & 0x4))
mc                135 drivers/video/fbdev/metronomefb.c 	u8 mc;
mc                211 drivers/video/fbdev/metronomefb.c 	wfm_hdr->mc += 1;
mc                165 fs/erofs/zdata.c 				     struct address_space *mc,
mc                186 fs/erofs/zdata.c 		page = find_get_page(mc, index);
mc                994 fs/erofs/zdata.c 					       struct address_space *mc,
mc                998 fs/erofs/zdata.c 	const bool nocache = __builtin_constant_p(mc) ? !mc : false;
mc               1050 fs/erofs/zdata.c 	if (mapping && mapping != mc)
mc               1060 fs/erofs/zdata.c 	if (page->mapping == mc) {
mc               1104 fs/erofs/zdata.c 	if (add_to_page_cache_lru(page, mc, index + nr, gfp)) {
mc                448 include/linux/firewire.h 		fw_iso_mc_callback_t mc;
mc                 30 include/linux/intel-pti.h void pti_writedata(struct pti_masterchannel *mc, u8 *buf, int count);
mc                 33 include/linux/intel-pti.h void pti_release_masterchannel(struct pti_masterchannel *mc);
mc                234 include/linux/netdevice.h #define netdev_mc_count(dev) netdev_hw_addr_list_count(&(dev)->mc)
mc                235 include/linux/netdevice.h #define netdev_mc_empty(dev) netdev_hw_addr_list_empty(&(dev)->mc)
mc                237 include/linux/netdevice.h 	netdev_hw_addr_list_for_each(ha, &(dev)->mc)
mc               1900 include/linux/netdevice.h 	struct netdev_hw_addr_list	mc;
mc               4209 include/linux/netdevice.h 	return __hw_addr_sync_dev(&dev->mc, dev, sync, unsync);
mc               4223 include/linux/netdevice.h 	__hw_addr_unsync_dev(&dev->mc, dev, unsync);
mc                313 include/linux/nvme.h 	__u8			mc;
mc                195 include/linux/slimbus.h 		  u8 mc);
mc                 83 include/soc/tegra/mc.h 				    struct tegra_mc *mc);
mc                 88 include/soc/tegra/mc.h 		 struct tegra_mc *mc)
mc                 99 include/soc/tegra/mc.h struct gart_device *tegra_gart_probe(struct device *dev, struct tegra_mc *mc);
mc                104 include/soc/tegra/mc.h tegra_gart_probe(struct device *dev, struct tegra_mc *mc)
mc                130 include/soc/tegra/mc.h 	int (*hotreset_assert)(struct tegra_mc *mc,
mc                132 include/soc/tegra/mc.h 	int (*hotreset_deassert)(struct tegra_mc *mc,
mc                134 include/soc/tegra/mc.h 	int (*block_dma)(struct tegra_mc *mc,
mc                136 include/soc/tegra/mc.h 	bool (*dma_idling)(struct tegra_mc *mc,
mc                138 include/soc/tegra/mc.h 	int (*unblock_dma)(struct tegra_mc *mc,
mc                140 include/soc/tegra/mc.h 	int (*reset_status)(struct tegra_mc *mc,
mc                184 include/soc/tegra/mc.h void tegra_mc_write_emem_configuration(struct tegra_mc *mc, unsigned long rate);
mc                185 include/soc/tegra/mc.h unsigned int tegra_mc_get_emem_device_count(struct tegra_mc *mc);
mc               1224 include/sound/soc.h static inline bool snd_soc_volsw_is_stereo(struct soc_mixer_control *mc)
mc               1226 include/sound/soc.h 	if (mc->reg == mc->rreg && mc->shift == mc->rshift)
mc                 37 include/trace/events/xen.h 	    TP_PROTO(struct multicall_entry *mc, unsigned nargs),
mc                 38 include/trace/events/xen.h 	    TP_ARGS(mc, nargs),
mc                 44 include/trace/events/xen.h 	    TP_fast_assign(__entry->op = mc->op;
mc                 46 include/trace/events/xen.h 			   memcpy(__entry->args, mc->args, sizeof(ulong) * nargs);
mc                199 mm/memcontrol.c } mc = {
mc                200 mm/memcontrol.c 	.lock = __SPIN_LOCK_UNLOCKED(mc.lock),
mc                201 mm/memcontrol.c 	.waitq = __WAIT_QUEUE_HEAD_INITIALIZER(mc.waitq),
mc               1364 mm/memcontrol.c 	spin_lock(&mc.lock);
mc               1365 mm/memcontrol.c 	from = mc.from;
mc               1366 mm/memcontrol.c 	to = mc.to;
mc               1373 mm/memcontrol.c 	spin_unlock(&mc.lock);
mc               1379 mm/memcontrol.c 	if (mc.moving_task && current != mc.moving_task) {
mc               1382 mm/memcontrol.c 			prepare_to_wait(&mc.waitq, &wait, TASK_INTERRUPTIBLE);
mc               1384 mm/memcontrol.c 			if (mc.moving_task)
mc               1386 mm/memcontrol.c 			finish_wait(&mc.waitq, &wait);
mc               5343 mm/memcontrol.c 	ret = try_charge(mc.to, GFP_KERNEL & ~__GFP_DIRECT_RECLAIM, count);
mc               5345 mm/memcontrol.c 		mc.precharge += count;
mc               5351 mm/memcontrol.c 		ret = try_charge(mc.to, GFP_KERNEL | __GFP_NORETRY, 1);
mc               5354 mm/memcontrol.c 		mc.precharge++;
mc               5380 mm/memcontrol.c 		if (!(mc.flags & MOVE_ANON))
mc               5383 mm/memcontrol.c 		if (!(mc.flags & MOVE_FILE))
mc               5399 mm/memcontrol.c 	if (!(mc.flags & MOVE_ANON) || non_swap_entry(ent))
mc               5445 mm/memcontrol.c 	if (!(mc.flags & MOVE_FILE))
mc               5616 mm/memcontrol.c 		if (page->mem_cgroup == mc.from) {
mc               5631 mm/memcontrol.c 	    mem_cgroup_id(mc.from) == lookup_swap_cgroup_id(ent)) {
mc               5658 mm/memcontrol.c 	if (!(mc.flags & MOVE_ANON))
mc               5660 mm/memcontrol.c 	if (page->mem_cgroup == mc.from) {
mc               5693 mm/memcontrol.c 			mc.precharge += HPAGE_PMD_NR;
mc               5703 mm/memcontrol.c 			mc.precharge++;	/* increment precharge temporarily */
mc               5722 mm/memcontrol.c 	precharge = mc.precharge;
mc               5723 mm/memcontrol.c 	mc.precharge = 0;
mc               5732 mm/memcontrol.c 	VM_BUG_ON(mc.moving_task);
mc               5733 mm/memcontrol.c 	mc.moving_task = current;
mc               5740 mm/memcontrol.c 	struct mem_cgroup *from = mc.from;
mc               5741 mm/memcontrol.c 	struct mem_cgroup *to = mc.to;
mc               5744 mm/memcontrol.c 	if (mc.precharge) {
mc               5745 mm/memcontrol.c 		cancel_charge(mc.to, mc.precharge);
mc               5746 mm/memcontrol.c 		mc.precharge = 0;
mc               5752 mm/memcontrol.c 	if (mc.moved_charge) {
mc               5753 mm/memcontrol.c 		cancel_charge(mc.from, mc.moved_charge);
mc               5754 mm/memcontrol.c 		mc.moved_charge = 0;
mc               5757 mm/memcontrol.c 	if (mc.moved_swap) {
mc               5759 mm/memcontrol.c 		if (!mem_cgroup_is_root(mc.from))
mc               5760 mm/memcontrol.c 			page_counter_uncharge(&mc.from->memsw, mc.moved_swap);
mc               5762 mm/memcontrol.c 		mem_cgroup_id_put_many(mc.from, mc.moved_swap);
mc               5768 mm/memcontrol.c 		if (!mem_cgroup_is_root(mc.to))
mc               5769 mm/memcontrol.c 			page_counter_uncharge(&mc.to->memory, mc.moved_swap);
mc               5771 mm/memcontrol.c 		mem_cgroup_id_get_many(mc.to, mc.moved_swap);
mc               5772 mm/memcontrol.c 		css_put_many(&mc.to->css, mc.moved_swap);
mc               5774 mm/memcontrol.c 		mc.moved_swap = 0;
mc               5778 mm/memcontrol.c 	wake_up_all(&mc.waitq);
mc               5783 mm/memcontrol.c 	struct mm_struct *mm = mc.mm;
mc               5789 mm/memcontrol.c 	mc.moving_task = NULL;
mc               5791 mm/memcontrol.c 	spin_lock(&mc.lock);
mc               5792 mm/memcontrol.c 	mc.from = NULL;
mc               5793 mm/memcontrol.c 	mc.to = NULL;
mc               5794 mm/memcontrol.c 	mc.mm = NULL;
mc               5795 mm/memcontrol.c 	spin_unlock(&mc.lock);
mc               5847 mm/memcontrol.c 		VM_BUG_ON(mc.from);
mc               5848 mm/memcontrol.c 		VM_BUG_ON(mc.to);
mc               5849 mm/memcontrol.c 		VM_BUG_ON(mc.precharge);
mc               5850 mm/memcontrol.c 		VM_BUG_ON(mc.moved_charge);
mc               5851 mm/memcontrol.c 		VM_BUG_ON(mc.moved_swap);
mc               5853 mm/memcontrol.c 		spin_lock(&mc.lock);
mc               5854 mm/memcontrol.c 		mc.mm = mm;
mc               5855 mm/memcontrol.c 		mc.from = from;
mc               5856 mm/memcontrol.c 		mc.to = memcg;
mc               5857 mm/memcontrol.c 		mc.flags = move_flags;
mc               5858 mm/memcontrol.c 		spin_unlock(&mc.lock);
mc               5872 mm/memcontrol.c 	if (mc.to)
mc               5890 mm/memcontrol.c 		if (mc.precharge < HPAGE_PMD_NR) {
mc               5899 mm/memcontrol.c 							     mc.from, mc.to)) {
mc               5900 mm/memcontrol.c 					mc.precharge -= HPAGE_PMD_NR;
mc               5901 mm/memcontrol.c 					mc.moved_charge += HPAGE_PMD_NR;
mc               5909 mm/memcontrol.c 						     mc.from, mc.to)) {
mc               5910 mm/memcontrol.c 				mc.precharge -= HPAGE_PMD_NR;
mc               5911 mm/memcontrol.c 				mc.moved_charge += HPAGE_PMD_NR;
mc               5928 mm/memcontrol.c 		if (!mc.precharge)
mc               5948 mm/memcontrol.c 						mc.from, mc.to)) {
mc               5949 mm/memcontrol.c 				mc.precharge--;
mc               5951 mm/memcontrol.c 				mc.moved_charge++;
mc               5960 mm/memcontrol.c 			if (!mem_cgroup_move_swap_account(ent, mc.from, mc.to)) {
mc               5961 mm/memcontrol.c 				mc.precharge--;
mc               5963 mm/memcontrol.c 				mc.moved_swap++;
mc               6000 mm/memcontrol.c 	atomic_inc(&mc.from->moving_account);
mc               6003 mm/memcontrol.c 	if (unlikely(!down_read_trylock(&mc.mm->mmap_sem))) {
mc               6019 mm/memcontrol.c 	walk_page_range(mc.mm, 0, mc.mm->highest_vm_end, &charge_walk_ops,
mc               6022 mm/memcontrol.c 	up_read(&mc.mm->mmap_sem);
mc               6023 mm/memcontrol.c 	atomic_dec(&mc.from->moving_account);
mc               6028 mm/memcontrol.c 	if (mc.to) {
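The memcontrol.c entries above all revolve around a single file-scope move context, itself named mc: charges are precharged against the destination group up front (mc.precharge), consumed one page at a time while pages are moved (mc.precharge--, mc.moved_charge++), and any leftover precharge is cancelled when the move ends. A toy single-threaded model of that precharge/consume/cancel bookkeeping, with invented names, a plain counter standing in for the page_counter, and only the destination side modelled:

#include <stdbool.h>
#include <stdio.h>

struct group { long charged; long limit; };

struct move_ctx {
	struct group *to;
	long precharge;      /* charged to "to" but not yet used      */
	long moved_charge;   /* actually transferred during the move  */
};

static bool try_charge(struct group *g, long n)
{
	if (g->charged + n > g->limit)
		return false;
	g->charged += n;
	return true;
}

static bool precharge(struct move_ctx *mc, long n)
{
	if (!try_charge(mc->to, n))
		return false;
	mc->precharge += n;
	return true;
}

static bool move_one(struct move_ctx *mc)
{
	if (!mc->precharge)
		return false;        /* caller would precharge more and retry */
	mc->precharge--;
	mc->moved_charge++;
	return true;
}

static void finish_move(struct move_ctx *mc)
{
	/* return whatever was precharged but never consumed */
	mc->to->charged -= mc->precharge;
	mc->precharge = 0;
}

int main(void)
{
	struct group to = { .charged = 0, .limit = 100 };
	struct move_ctx mc = { .to = &to };

	if (!precharge(&mc, 10))
		return 1;
	for (int i = 0; i < 7; i++)
		move_one(&mc);
	finish_move(&mc);
	printf("charged=%ld moved=%ld precharge=%ld\n",
	       to.charged, mc.moved_charge, mc.precharge);	/* 7, 7, 0 */
	return 0;
}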
mc                743 net/core/dev_addr_lists.c 	list_for_each_entry(ha, &dev->mc.list, list) {
mc                750 net/core/dev_addr_lists.c 	err = __hw_addr_create_ex(&dev->mc, addr, dev->addr_len,
mc                766 net/core/dev_addr_lists.c 	err = __hw_addr_add_ex(&dev->mc, addr, dev->addr_len,
mc                806 net/core/dev_addr_lists.c 	err = __hw_addr_del_ex(&dev->mc, addr, dev->addr_len,
mc                862 net/core/dev_addr_lists.c 	err = __hw_addr_sync(&to->mc, &from->mc, to->addr_len);
mc                892 net/core/dev_addr_lists.c 	err = __hw_addr_sync_multiple(&to->mc, &from->mc, to->addr_len);
mc                916 net/core/dev_addr_lists.c 	__hw_addr_unsync(&to->mc, &from->mc, to->addr_len);
mc                932 net/core/dev_addr_lists.c 	__hw_addr_flush(&dev->mc);
mc                945 net/core/dev_addr_lists.c 	__hw_addr_init(&dev->mc);
mc               2303 net/core/pktgen.c 		__u32 mc;
mc               2307 net/core/pktgen.c 			mc = prandom_u32() % pkt_dev->src_mac_count;
mc               2309 net/core/pktgen.c 			mc = pkt_dev->cur_src_mac_offset++;
mc               2315 net/core/pktgen.c 		tmp = pkt_dev->src_mac[5] + (mc & 0xFF);
mc               2317 net/core/pktgen.c 		tmp = (pkt_dev->src_mac[4] + ((mc >> 8) & 0xFF) + (tmp >> 8));
mc               2319 net/core/pktgen.c 		tmp = (pkt_dev->src_mac[3] + ((mc >> 16) & 0xFF) + (tmp >> 8));
mc               2321 net/core/pktgen.c 		tmp = (pkt_dev->src_mac[2] + ((mc >> 24) & 0xFF) + (tmp >> 8));
mc               2329 net/core/pktgen.c 		__u32 mc;
mc               2333 net/core/pktgen.c 			mc = prandom_u32() % pkt_dev->dst_mac_count;
mc               2336 net/core/pktgen.c 			mc = pkt_dev->cur_dst_mac_offset++;
mc               2343 net/core/pktgen.c 		tmp = pkt_dev->dst_mac[5] + (mc & 0xFF);
mc               2345 net/core/pktgen.c 		tmp = (pkt_dev->dst_mac[4] + ((mc >> 8) & 0xFF) + (tmp >> 8));
mc               2347 net/core/pktgen.c 		tmp = (pkt_dev->dst_mac[3] + ((mc >> 16) & 0xFF) + (tmp >> 8));
mc               2349 net/core/pktgen.c 		tmp = (pkt_dev->dst_mac[2] + ((mc >> 24) & 0xFF) + (tmp >> 8));
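In the pktgen.c entries above, mc is a 32-bit offset (random or sequential) folded into the low four bytes of the base MAC address one byte at a time, with the carry of each addition rippling into the next more-significant byte. A standalone version of that carry chain, assuming the same byte layout (base[0..5], offset applied from byte 5 upward):

#include <stdint.h>
#include <stdio.h>

static void mac_add_offset(const uint8_t base[6], uint32_t off, uint8_t out[6])
{
	uint32_t tmp;

	out[0] = base[0];
	out[1] = base[1];

	tmp = base[5] + (off & 0xff);
	out[5] = tmp & 0xff;
	tmp = base[4] + ((off >> 8) & 0xff) + (tmp >> 8);	/* carry in */
	out[4] = tmp & 0xff;
	tmp = base[3] + ((off >> 16) & 0xff) + (tmp >> 8);
	out[3] = tmp & 0xff;
	tmp = base[2] + ((off >> 24) & 0xff) + (tmp >> 8);
	out[2] = tmp & 0xff;
}

int main(void)
{
	const uint8_t base[6] = { 0x00, 0x11, 0x22, 0x33, 0x44, 0xf0 };
	uint8_t mac[6];

	mac_add_offset(base, 0x20, mac);	/* 0xf0 + 0x20 carries into byte 4 */
	printf("%02x:%02x:%02x:%02x:%02x:%02x\n",
	       mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
	return 0;
}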
mc               3927 net/core/rtnetlink.c 	err = nlmsg_populate_fdb(skb, cb, dev, idx, &dev->mc);
mc                624 net/ipv6/mcast.c 	struct ipv6_mc_socklist *mc;
mc                629 net/ipv6/mcast.c 	for_each_pmc_rcu(np, mc) {
mc                630 net/ipv6/mcast.c 		if (ipv6_addr_equal(&mc->addr, mc_addr))
mc                633 net/ipv6/mcast.c 	if (!mc) {
mc                637 net/ipv6/mcast.c 	read_lock(&mc->sflock);
mc                638 net/ipv6/mcast.c 	psl = mc->sflist;
mc                640 net/ipv6/mcast.c 		rv = mc->sfmode == MCAST_EXCLUDE;
mc                648 net/ipv6/mcast.c 		if (mc->sfmode == MCAST_INCLUDE && i >= psl->sl_count)
mc                650 net/ipv6/mcast.c 		if (mc->sfmode == MCAST_EXCLUDE && i < psl->sl_count)
mc                653 net/ipv6/mcast.c 	read_unlock(&mc->sflock);
mc                659 net/ipv6/mcast.c static void igmp6_group_added(struct ifmcaddr6 *mc)
mc                661 net/ipv6/mcast.c 	struct net_device *dev = mc->idev->dev;
mc                664 net/ipv6/mcast.c 	if (IPV6_ADDR_MC_SCOPE(&mc->mca_addr) <
mc                668 net/ipv6/mcast.c 	spin_lock_bh(&mc->mca_lock);
mc                669 net/ipv6/mcast.c 	if (!(mc->mca_flags&MAF_LOADED)) {
mc                670 net/ipv6/mcast.c 		mc->mca_flags |= MAF_LOADED;
mc                671 net/ipv6/mcast.c 		if (ndisc_mc_map(&mc->mca_addr, buf, dev, 0) == 0)
mc                674 net/ipv6/mcast.c 	spin_unlock_bh(&mc->mca_lock);
mc                676 net/ipv6/mcast.c 	if (!(dev->flags & IFF_UP) || (mc->mca_flags & MAF_NOREPORT))
mc                679 net/ipv6/mcast.c 	if (mld_in_v1_mode(mc->idev)) {
mc                680 net/ipv6/mcast.c 		igmp6_join_group(mc);
mc                689 net/ipv6/mcast.c 	if (mc->mca_sfmode == MCAST_EXCLUDE)
mc                690 net/ipv6/mcast.c 		mc->mca_crcount = mc->idev->mc_qrv;
mc                692 net/ipv6/mcast.c 	mld_ifc_event(mc->idev);
mc                695 net/ipv6/mcast.c static void igmp6_group_dropped(struct ifmcaddr6 *mc)
mc                697 net/ipv6/mcast.c 	struct net_device *dev = mc->idev->dev;
mc                700 net/ipv6/mcast.c 	if (IPV6_ADDR_MC_SCOPE(&mc->mca_addr) <
mc                704 net/ipv6/mcast.c 	spin_lock_bh(&mc->mca_lock);
mc                705 net/ipv6/mcast.c 	if (mc->mca_flags&MAF_LOADED) {
mc                706 net/ipv6/mcast.c 		mc->mca_flags &= ~MAF_LOADED;
mc                707 net/ipv6/mcast.c 		if (ndisc_mc_map(&mc->mca_addr, buf, dev, 0) == 0)
mc                711 net/ipv6/mcast.c 	spin_unlock_bh(&mc->mca_lock);
mc                712 net/ipv6/mcast.c 	if (mc->mca_flags & MAF_NOREPORT)
mc                715 net/ipv6/mcast.c 	if (!mc->idev->dead)
mc                716 net/ipv6/mcast.c 		igmp6_leave_group(mc);
mc                718 net/ipv6/mcast.c 	spin_lock_bh(&mc->mca_lock);
mc                719 net/ipv6/mcast.c 	if (del_timer(&mc->mca_timer))
mc                720 net/ipv6/mcast.c 		refcount_dec(&mc->mca_refcnt);
mc                721 net/ipv6/mcast.c 	spin_unlock_bh(&mc->mca_lock);
mc                837 net/ipv6/mcast.c static void mca_get(struct ifmcaddr6 *mc)
mc                839 net/ipv6/mcast.c 	refcount_inc(&mc->mca_refcnt);
mc                842 net/ipv6/mcast.c static void ma_put(struct ifmcaddr6 *mc)
mc                844 net/ipv6/mcast.c 	if (refcount_dec_and_test(&mc->mca_refcnt)) {
mc                845 net/ipv6/mcast.c 		in6_dev_put(mc->idev);
mc                846 net/ipv6/mcast.c 		kfree(mc);
mc                854 net/ipv6/mcast.c 	struct ifmcaddr6 *mc;
mc                856 net/ipv6/mcast.c 	mc = kzalloc(sizeof(*mc), GFP_ATOMIC);
mc                857 net/ipv6/mcast.c 	if (!mc)
mc                860 net/ipv6/mcast.c 	timer_setup(&mc->mca_timer, igmp6_timer_handler, 0);
mc                862 net/ipv6/mcast.c 	mc->mca_addr = *addr;
mc                863 net/ipv6/mcast.c 	mc->idev = idev; /* reference taken by caller */
mc                864 net/ipv6/mcast.c 	mc->mca_users = 1;
mc                866 net/ipv6/mcast.c 	mc->mca_cstamp = mc->mca_tstamp = jiffies;
mc                867 net/ipv6/mcast.c 	refcount_set(&mc->mca_refcnt, 1);
mc                868 net/ipv6/mcast.c 	spin_lock_init(&mc->mca_lock);
mc                870 net/ipv6/mcast.c 	mc->mca_sfmode = mode;
mc                871 net/ipv6/mcast.c 	mc->mca_sfcount[mode] = 1;
mc                873 net/ipv6/mcast.c 	if (ipv6_addr_is_ll_all_nodes(&mc->mca_addr) ||
mc                874 net/ipv6/mcast.c 	    IPV6_ADDR_MC_SCOPE(&mc->mca_addr) < IPV6_ADDR_SCOPE_LINKLOCAL)
mc                875 net/ipv6/mcast.c 		mc->mca_flags |= MAF_NOREPORT;
mc                877 net/ipv6/mcast.c 	return mc;
mc                886 net/ipv6/mcast.c 	struct ifmcaddr6 *mc;
mc                904 net/ipv6/mcast.c 	for (mc = idev->mc_list; mc; mc = mc->next) {
mc                905 net/ipv6/mcast.c 		if (ipv6_addr_equal(&mc->mca_addr, addr)) {
mc                906 net/ipv6/mcast.c 			mc->mca_users++;
mc                908 net/ipv6/mcast.c 			ip6_mc_add_src(idev, &mc->mca_addr, mode, 0, NULL, 0);
mc                914 net/ipv6/mcast.c 	mc = mca_alloc(idev, addr, mode);
mc                915 net/ipv6/mcast.c 	if (!mc) {
mc                921 net/ipv6/mcast.c 	mc->next = idev->mc_list;
mc                922 net/ipv6/mcast.c 	idev->mc_list = mc;
mc                927 net/ipv6/mcast.c 	mca_get(mc);
mc                930 net/ipv6/mcast.c 	mld_del_delrec(idev, mc);
mc                931 net/ipv6/mcast.c 	igmp6_group_added(mc);
mc                932 net/ipv6/mcast.c 	ma_put(mc);
mc                997 net/ipv6/mcast.c 	struct ifmcaddr6 *mc;
mc               1004 net/ipv6/mcast.c 		for (mc = idev->mc_list; mc; mc = mc->next) {
mc               1005 net/ipv6/mcast.c 			if (ipv6_addr_equal(&mc->mca_addr, group))
mc               1008 net/ipv6/mcast.c 		if (mc) {
mc               1012 net/ipv6/mcast.c 				spin_lock_bh(&mc->mca_lock);
mc               1013 net/ipv6/mcast.c 				for (psf = mc->mca_sources; psf; psf = psf->sf_next) {
mc               1020 net/ipv6/mcast.c 						mc->mca_sfcount[MCAST_EXCLUDE];
mc               1022 net/ipv6/mcast.c 					rv = mc->mca_sfcount[MCAST_EXCLUDE] != 0;
mc               1023 net/ipv6/mcast.c 				spin_unlock_bh(&mc->mca_lock);
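The ipv6/mcast.c entries above show the mca_get()/ma_put() pairing: a group entry is allocated with a refcount of 1, callers take extra references while publishing it, and the final put frees the object. A compact userspace sketch of the same get/put discipline using C11 atomics (names are hypothetical):

#include <stdatomic.h>
#include <stdio.h>
#include <stdlib.h>

struct group_entry {
	atomic_int refcnt;
	int addr;	/* placeholder for the multicast address */
};

static struct group_entry *group_alloc(int addr)
{
	struct group_entry *g = calloc(1, sizeof(*g));

	if (!g)
		return NULL;
	atomic_init(&g->refcnt, 1);	/* reference owned by the caller */
	g->addr = addr;
	return g;
}

static void group_get(struct group_entry *g)
{
	atomic_fetch_add(&g->refcnt, 1);
}

static void group_put(struct group_entry *g)
{
	/* free on the 1 -> 0 transition, as ma_put() does for ifmcaddr6 */
	if (atomic_fetch_sub(&g->refcnt, 1) == 1) {
		printf("freeing group %d\n", g->addr);
		free(g);
	}
}

int main(void)
{
	struct group_entry *g = group_alloc(42);

	if (!g)
		return 1;
	group_get(g);	/* e.g. reference held by a published list */
	group_put(g);	/* caller is done */
	group_put(g);	/* list teardown: last put frees */
	return 0;
}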
mc                873 net/mac80211/iface.c 		__hw_addr_unsync(&local->mc_list, &sdata->dev->mc,
mc               1100 net/mac80211/iface.c 	__hw_addr_sync(&local->mc_list, &dev->mc, dev->addr_len);
mc                 38 net/mac80211/main.c 	u64 mc;
mc                 70 net/mac80211/main.c 	mc = drv_prepare_multicast(local, &local->mc_list);
mc                 76 net/mac80211/main.c 	drv_configure_filter(local, changed_flags, &new_flags, mc);
mc                311 sound/soc/atmel/atmel-pdmic.c 	struct soc_mixer_control *mc =
mc                314 sound/soc/atmel/atmel-pdmic.c 	int max = mc->max;
mc                270 sound/soc/codecs/88pm860x-codec.c 	struct soc_mixer_control *mc =
mc                273 sound/soc/codecs/88pm860x-codec.c 	unsigned int reg = mc->reg;
mc                274 sound/soc/codecs/88pm860x-codec.c 	unsigned int reg2 = mc->rreg;
mc                294 sound/soc/codecs/88pm860x-codec.c 	struct soc_mixer_control *mc =
mc                297 sound/soc/codecs/88pm860x-codec.c 	unsigned int reg = mc->reg;
mc                298 sound/soc/codecs/88pm860x-codec.c 	unsigned int reg2 = mc->rreg;
mc                327 sound/soc/codecs/88pm860x-codec.c 	struct soc_mixer_control *mc =
mc                330 sound/soc/codecs/88pm860x-codec.c 	unsigned int reg = mc->reg;
mc                331 sound/soc/codecs/88pm860x-codec.c 	unsigned int reg2 = mc->rreg;
mc                332 sound/soc/codecs/88pm860x-codec.c 	unsigned int shift = mc->shift;
mc                333 sound/soc/codecs/88pm860x-codec.c 	int max = mc->max, val, val2;
mc                347 sound/soc/codecs/88pm860x-codec.c 	struct soc_mixer_control *mc =
mc                350 sound/soc/codecs/88pm860x-codec.c 	unsigned int reg = mc->reg;
mc                351 sound/soc/codecs/88pm860x-codec.c 	unsigned int reg2 = mc->rreg;
mc                352 sound/soc/codecs/88pm860x-codec.c 	unsigned int shift = mc->shift;
mc                353 sound/soc/codecs/88pm860x-codec.c 	int max = mc->max;
mc                447 sound/soc/codecs/da7218.c 	struct soc_mixer_control *mc =
mc                453 sound/soc/codecs/da7218.c 	unsigned int lshift = mc->shift;
mc                454 sound/soc/codecs/da7218.c 	unsigned int rshift = mc->rshift;
mc                455 sound/soc/codecs/da7218.c 	unsigned int mask = (mc->max << lshift) | (mc->max << rshift);
mc               2149 sound/soc/codecs/madera.c 	struct soc_mixer_control *mc =
mc               2164 sound/soc/codecs/madera.c 	mask = (mc->reg - MADERA_ADC_DIGITAL_VOLUME_1L) / 4;
mc                353 sound/soc/codecs/max98090.c 	struct soc_mixer_control *mc =
mc                355 sound/soc/codecs/max98090.c 	unsigned int mask = (1 << fls(mc->max)) - 1;
mc                356 sound/soc/codecs/max98090.c 	unsigned int val = snd_soc_component_read32(component, mc->reg);
mc                359 sound/soc/codecs/max98090.c 	switch (mc->reg) {
mc                373 sound/soc/codecs/max98090.c 	val = (val >> mc->shift) & mask;
mc                393 sound/soc/codecs/max98090.c 	struct soc_mixer_control *mc =
mc                395 sound/soc/codecs/max98090.c 	unsigned int mask = (1 << fls(mc->max)) - 1;
mc                397 sound/soc/codecs/max98090.c 	unsigned int val = snd_soc_component_read32(component, mc->reg);
mc                400 sound/soc/codecs/max98090.c 	switch (mc->reg) {
mc                414 sound/soc/codecs/max98090.c 	val = (val >> mc->shift) & mask;
mc                426 sound/soc/codecs/max98090.c 	snd_soc_component_update_bits(component, mc->reg,
mc                427 sound/soc/codecs/max98090.c 		mask << mc->shift,
mc                428 sound/soc/codecs/max98090.c 		sel << mc->shift);
mc                422 sound/soc/codecs/mt6358.c 	struct soc_mixer_control *mc =
mc                431 sound/soc/codecs/mt6358.c 	switch (mc->reg) {
mc                164 sound/soc/codecs/tlv320aic3x.c 	struct soc_mixer_control *mc =
mc                166 sound/soc/codecs/tlv320aic3x.c 	unsigned int reg = mc->reg;
mc                167 sound/soc/codecs/tlv320aic3x.c 	unsigned int shift = mc->shift;
mc                168 sound/soc/codecs/tlv320aic3x.c 	int max = mc->max;
mc                170 sound/soc/codecs/tlv320aic3x.c 	unsigned int invert = mc->invert;
mc                829 sound/soc/codecs/twl4030.c 	struct soc_mixer_control *mc =
mc                832 sound/soc/codecs/twl4030.c 	unsigned int reg = mc->reg;
mc                833 sound/soc/codecs/twl4030.c 	unsigned int shift = mc->shift;
mc                834 sound/soc/codecs/twl4030.c 	unsigned int rshift = mc->rshift;
mc                835 sound/soc/codecs/twl4030.c 	int max = mc->max;
mc                858 sound/soc/codecs/twl4030.c 	struct soc_mixer_control *mc =
mc                861 sound/soc/codecs/twl4030.c 	unsigned int reg = mc->reg;
mc                862 sound/soc/codecs/twl4030.c 	unsigned int shift = mc->shift;
mc                863 sound/soc/codecs/twl4030.c 	unsigned int rshift = mc->rshift;
mc                864 sound/soc/codecs/twl4030.c 	int max = mc->max;
mc                887 sound/soc/codecs/twl4030.c 	struct soc_mixer_control *mc =
mc                890 sound/soc/codecs/twl4030.c 	unsigned int reg = mc->reg;
mc                891 sound/soc/codecs/twl4030.c 	unsigned int reg2 = mc->rreg;
mc                892 sound/soc/codecs/twl4030.c 	unsigned int shift = mc->shift;
mc                893 sound/soc/codecs/twl4030.c 	int max = mc->max;
mc                914 sound/soc/codecs/twl4030.c 	struct soc_mixer_control *mc =
mc                917 sound/soc/codecs/twl4030.c 	unsigned int reg = mc->reg;
mc                918 sound/soc/codecs/twl4030.c 	unsigned int reg2 = mc->rreg;
mc                919 sound/soc/codecs/twl4030.c 	unsigned int shift = mc->shift;
mc                920 sound/soc/codecs/twl4030.c 	int max = mc->max;
mc                408 sound/soc/codecs/wm5110.c 	struct soc_mixer_control *mc =
mc                411 sound/soc/codecs/wm5110.c 	unsigned int mask = (0x1 << mc->shift) | (0x1 << mc->rshift);
mc                412 sound/soc/codecs/wm5110.c 	unsigned int lnew = (!!ucontrol->value.integer.value[0]) << mc->shift;
mc                413 sound/soc/codecs/wm5110.c 	unsigned int rnew = (!!ucontrol->value.integer.value[1]) << mc->rshift;
mc                431 sound/soc/codecs/wm5110.c 	lold = dre & (1 << mc->shift);
mc                432 sound/soc/codecs/wm5110.c 	rold = dre & (1 << mc->rshift);
mc                434 sound/soc/codecs/wm5110.c 	lena = ena & (1 << mc->rshift);
mc                435 sound/soc/codecs/wm5110.c 	rena = ena & (1 << mc->shift);
mc                452 sound/soc/codecs/wm5110.c 		wm5110_clear_pga_volume(arizona, mc->shift);
mc                455 sound/soc/codecs/wm5110.c 		wm5110_clear_pga_volume(arizona, mc->rshift);
mc                302 sound/soc/codecs/wm8350.c 	struct soc_mixer_control *mc =
mc                305 sound/soc/codecs/wm8350.c 	unsigned int reg = mc->reg;
mc                346 sound/soc/codecs/wm8350.c 	struct soc_mixer_control *mc =
mc                348 sound/soc/codecs/wm8350.c 	unsigned int reg = mc->reg;
mc                 90 sound/soc/codecs/wm8400.c 	struct soc_mixer_control *mc =
mc                 92 sound/soc/codecs/wm8400.c 	int reg = mc->reg;
mc                323 sound/soc/codecs/wm8400.c 	struct soc_mixer_control *mc =
mc                325 sound/soc/codecs/wm8400.c 	u32 reg_shift = mc->shift;
mc                259 sound/soc/codecs/wm8580.c 	struct soc_mixer_control *mc =
mc                263 sound/soc/codecs/wm8580.c 	unsigned int reg = mc->reg;
mc                264 sound/soc/codecs/wm8580.c 	unsigned int reg2 = mc->rreg;
mc                132 sound/soc/codecs/wm8990.c 	struct soc_mixer_control *mc =
mc                134 sound/soc/codecs/wm8990.c 	int reg = mc->reg;
mc                297 sound/soc/codecs/wm8994.c 	struct soc_mixer_control *mc =
mc                303 sound/soc/codecs/wm8994.c 	if (mc->shift == WM8994_AIF1DAC1_DRC_ENA_SHIFT)
mc                309 sound/soc/codecs/wm8994.c 	ret = snd_soc_component_read32(component, mc->reg);
mc                222 sound/soc/codecs/wm9712.c 	struct soc_mixer_control *mc =
mc                228 sound/soc/codecs/wm9712.c 	mixer = mc->shift >> 8;
mc                229 sound/soc/codecs/wm9712.c 	shift = mc->shift & 0xff;
mc                265 sound/soc/codecs/wm9712.c 	struct soc_mixer_control *mc =
mc                269 sound/soc/codecs/wm9712.c 	mixer = mc->shift >> 8;
mc                270 sound/soc/codecs/wm9712.c 	shift = mc->shift & 0xff;
mc                231 sound/soc/codecs/wm9713.c 	struct soc_mixer_control *mc =
mc                237 sound/soc/codecs/wm9713.c 	mixer = mc->shift >> 8;
mc                238 sound/soc/codecs/wm9713.c 	shift = mc->shift & 0xff;
mc                274 sound/soc/codecs/wm9713.c 	struct soc_mixer_control *mc =
mc                278 sound/soc/codecs/wm9713.c 	mixer = mc->shift >> 8;
mc                279 sound/soc/codecs/wm9713.c 	shift = mc->shift & 0xff;
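The wm9712/wm9713 handlers above pack two values into the control's shift field: a mixer index in the upper byte and the real bit shift in the lower byte, recovered with >> 8 and & 0xff. A trivial illustration of that packing, with invented names:

#include <stdio.h>

#define PACK_MIXER_SHIFT(mixer, shift)	(((mixer) << 8) | ((shift) & 0xff))

int main(void)
{
	unsigned int packed = PACK_MIXER_SHIFT(3, 5);
	unsigned int mixer = packed >> 8;	/* 3 */
	unsigned int shift = packed & 0xff;	/* 5 */

	printf("mixer=%u shift=%u\n", mixer, shift);
	return 0;
}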
mc               3021 sound/soc/codecs/wm_adsp.c 	struct soc_mixer_control *mc =
mc               3023 sound/soc/codecs/wm_adsp.c 	struct wm_adsp *dsp = &dsps[mc->shift - 1];
mc               3037 sound/soc/codecs/wm_adsp.c 	struct soc_mixer_control *mc =
mc               3039 sound/soc/codecs/wm_adsp.c 	struct wm_adsp *dsp = &dsps[mc->shift - 1];
mc                 68 sound/soc/fsl/mx27vis-aic32x4.c 	struct soc_mixer_control *mc =
mc                 71 sound/soc/fsl/mx27vis-aic32x4.c 	unsigned int reg = mc->reg;
mc                 72 sound/soc/fsl/mx27vis-aic32x4.c 	int max = mc->max;
mc                 95 sound/soc/fsl/mx27vis-aic32x4.c 	struct soc_mixer_control *mc =
mc                 97 sound/soc/fsl/mx27vis-aic32x4.c 	unsigned int reg = mc->reg;
mc                368 sound/soc/intel/atom/sst-atom-controls.c 	struct sst_gain_mixer_control *mc = (void *)kcontrol->private_value;
mc                371 sound/soc/intel/atom/sst-atom-controls.c 	uinfo->count = mc->stereo ? 2 : 1;
mc                372 sound/soc/intel/atom/sst-atom-controls.c 	uinfo->value.integer.min = mc->min;
mc                373 sound/soc/intel/atom/sst-atom-controls.c 	uinfo->value.integer.max = mc->max;
mc                428 sound/soc/intel/atom/sst-atom-controls.c 	struct sst_gain_mixer_control *mc = (void *)kcontrol->private_value;
mc                429 sound/soc/intel/atom/sst-atom-controls.c 	struct sst_gain_value *gv = mc->gain_val;
mc                431 sound/soc/intel/atom/sst-atom-controls.c 	switch (mc->type) {
mc                447 sound/soc/intel/atom/sst-atom-controls.c 				mc->type);
mc                460 sound/soc/intel/atom/sst-atom-controls.c 	struct sst_gain_mixer_control *mc = (void *)kcontrol->private_value;
mc                461 sound/soc/intel/atom/sst-atom-controls.c 	struct sst_gain_value *gv = mc->gain_val;
mc                465 sound/soc/intel/atom/sst-atom-controls.c 	switch (mc->type) {
mc                470 sound/soc/intel/atom/sst-atom-controls.c 				mc->pname, gv->l_gain, gv->r_gain);
mc                475 sound/soc/intel/atom/sst-atom-controls.c 		dev_dbg(cmpnt->dev, "%s: Mute %d\n", mc->pname, gv->mute);
mc                481 sound/soc/intel/atom/sst-atom-controls.c 					mc->pname, gv->ramp_duration);
mc                487 sound/soc/intel/atom/sst-atom-controls.c 				mc->type);
mc                491 sound/soc/intel/atom/sst-atom-controls.c 	if (mc->w && mc->w->power)
mc                492 sound/soc/intel/atom/sst-atom-controls.c 		ret = sst_send_gain_cmd(drv, gv, mc->task_id,
mc                493 sound/soc/intel/atom/sst-atom-controls.c 			mc->pipe_id | mc->instance_id, mc->module_id, 0);
mc                585 sound/soc/intel/atom/sst-atom-controls.c 	struct sst_gain_mixer_control *mc;
mc                593 sound/soc/intel/atom/sst-atom-controls.c 		mc = (void *)kctl->private_value;
mc                594 sound/soc/intel/atom/sst-atom-controls.c 		gv = mc->gain_val;
mc                596 sound/soc/intel/atom/sst-atom-controls.c 		ret = sst_send_gain_cmd(drv, gv, mc->task_id,
mc                597 sound/soc/intel/atom/sst-atom-controls.c 			mc->pipe_id | mc->instance_id, mc->module_id, mute);
mc                612 sound/soc/intel/atom/sst-atom-controls.c 	struct soc_mixer_control *mc;
mc                623 sound/soc/intel/atom/sst-atom-controls.c 			mc = (struct soc_mixer_control *)(w->kcontrols[i])->private_value;
mc                624 sound/soc/intel/atom/sst-atom-controls.c 			val |= 1 << mc->shift;
mc               1382 sound/soc/intel/atom/sst-atom-controls.c 		struct sst_gain_mixer_control *mc = (void *)kctl->private_value;
mc               1384 sound/soc/intel/atom/sst-atom-controls.c 		mc->w = w;
mc               1436 sound/soc/intel/atom/sst-atom-controls.c 			struct sst_gain_mixer_control *mc =
mc               1439 sound/soc/intel/atom/sst-atom-controls.c 			mc->w = w;
mc                162 sound/soc/intel/atom/sst-mfld-platform.h 	struct soc_mixer_control mc;
mc                176 sound/soc/intel/haswell/sst-haswell-pcm.c 	struct soc_mixer_control *mc =
mc                185 sound/soc/intel/haswell/sst-haswell-pcm.c 	dai = mod_map[mc->reg].dai_id;
mc                186 sound/soc/intel/haswell/sst-haswell-pcm.c 	stream = mod_map[mc->reg].stream;
mc                225 sound/soc/intel/haswell/sst-haswell-pcm.c 	struct soc_mixer_control *mc =
mc                234 sound/soc/intel/haswell/sst-haswell-pcm.c 	dai = mod_map[mc->reg].dai_id;
mc                235 sound/soc/intel/haswell/sst-haswell-pcm.c 	stream = mod_map[mc->reg].stream;
mc                413 sound/soc/qcom/qdsp6/q6routing.c 	struct soc_mixer_control *mc =
mc                415 sound/soc/qcom/qdsp6/q6routing.c 	int session_id = mc->shift;
mc                420 sound/soc/qcom/qdsp6/q6routing.c 	if (session->port_id == mc->reg)
mc                435 sound/soc/qcom/qdsp6/q6routing.c 	struct soc_mixer_control *mc =
mc                438 sound/soc/qcom/qdsp6/q6routing.c 	int be_id = mc->reg;
mc                439 sound/soc/qcom/qdsp6/q6routing.c 	int session_id = mc->shift;
mc                354 sound/soc/soc-dapm.c 	struct soc_mixer_control *mc;
mc                369 sound/soc/soc-dapm.c 		mc = (struct soc_mixer_control *)kcontrol->private_value;
mc                371 sound/soc/soc-dapm.c 		if (mc->autodisable && snd_soc_volsw_is_stereo(mc))
mc                376 sound/soc/soc-dapm.c 		if (mc->autodisable) {
mc                387 sound/soc/soc-dapm.c 			template.reg = mc->reg;
mc                388 sound/soc/soc-dapm.c 			template.mask = (1 << fls(mc->max)) - 1;
mc                389 sound/soc/soc-dapm.c 			template.shift = mc->shift;
mc                390 sound/soc/soc-dapm.c 			if (mc->invert)
mc                391 sound/soc/soc-dapm.c 				template.off_val = mc->max;
mc                785 sound/soc/soc-dapm.c 	struct soc_mixer_control *mc = (struct soc_mixer_control *)
mc                787 sound/soc/soc-dapm.c 	unsigned int reg = mc->reg;
mc                788 sound/soc/soc-dapm.c 	unsigned int shift = mc->shift;
mc                789 sound/soc/soc-dapm.c 	unsigned int max = mc->max;
mc                791 sound/soc/soc-dapm.c 	unsigned int invert = mc->invert;
mc                808 sound/soc/soc-dapm.c 		if (snd_soc_volsw_is_stereo(mc) && nth_path > 0) {
mc                809 sound/soc/soc-dapm.c 			if (reg != mc->rreg)
mc                810 sound/soc/soc-dapm.c 				soc_dapm_read(p->sink->dapm, mc->rreg, &val);
mc                811 sound/soc/soc-dapm.c 			val = (val >> mc->rshift) & mask;
mc               3282 sound/soc/soc-dapm.c 	struct soc_mixer_control *mc =
mc               3284 sound/soc/soc-dapm.c 	int reg = mc->reg;
mc               3285 sound/soc/soc-dapm.c 	unsigned int shift = mc->shift;
mc               3286 sound/soc/soc-dapm.c 	int max = mc->max;
mc               3289 sound/soc/soc-dapm.c 	unsigned int invert = mc->invert;
mc               3298 sound/soc/soc-dapm.c 		if (ret == 0 && reg != mc->rreg)
mc               3299 sound/soc/soc-dapm.c 			ret = soc_dapm_read(dapm, mc->rreg, &reg_val);
mc               3301 sound/soc/soc-dapm.c 		if (snd_soc_volsw_is_stereo(mc))
mc               3302 sound/soc/soc-dapm.c 			rval = (reg_val >> mc->rshift) & mask;
mc               3307 sound/soc/soc-dapm.c 		if (snd_soc_volsw_is_stereo(mc))
mc               3320 sound/soc/soc-dapm.c 	if (snd_soc_volsw_is_stereo(mc)) {
mc               3345 sound/soc/soc-dapm.c 	struct soc_mixer_control *mc =
mc               3347 sound/soc/soc-dapm.c 	int reg = mc->reg;
mc               3348 sound/soc/soc-dapm.c 	unsigned int shift = mc->shift;
mc               3349 sound/soc/soc-dapm.c 	int max = mc->max;
mc               3352 sound/soc/soc-dapm.c 	unsigned int invert = mc->invert;
mc               3364 sound/soc/soc-dapm.c 	if (snd_soc_volsw_is_stereo(mc)) {
mc               3382 sound/soc/soc-dapm.c 		rval = rval << mc->rshift;
mc               3386 sound/soc/soc-dapm.c 		if (snd_soc_volsw_is_stereo(mc))
mc               3387 sound/soc/soc-dapm.c 			reg_change |= soc_dapm_test_bits(dapm, mc->rreg,
mc               3388 sound/soc/soc-dapm.c 							 mask << mc->rshift,
mc               3394 sound/soc/soc-dapm.c 			if (snd_soc_volsw_is_stereo(mc)) {
mc               3396 sound/soc/soc-dapm.c 				update.reg2 = mc->rreg;
mc               3397 sound/soc/soc-dapm.c 				update.mask2 = mask << mc->rshift;
mc                184 sound/soc/soc-ops.c 	struct soc_mixer_control *mc =
mc                188 sound/soc/soc-ops.c 	if (!mc->platform_max)
mc                189 sound/soc/soc-ops.c 		mc->platform_max = mc->max;
mc                190 sound/soc/soc-ops.c 	platform_max = mc->platform_max;
mc                197 sound/soc/soc-ops.c 	uinfo->count = snd_soc_volsw_is_stereo(mc) ? 2 : 1;
mc                199 sound/soc/soc-ops.c 	uinfo->value.integer.max = platform_max - mc->min;
mc                219 sound/soc/soc-ops.c 	struct soc_mixer_control *mc =
mc                226 sound/soc/soc-ops.c 	uinfo->value.integer.max += mc->min;
mc                246 sound/soc/soc-ops.c 	struct soc_mixer_control *mc =
mc                248 sound/soc/soc-ops.c 	unsigned int reg = mc->reg;
mc                249 sound/soc/soc-ops.c 	unsigned int reg2 = mc->rreg;
mc                250 sound/soc/soc-ops.c 	unsigned int shift = mc->shift;
mc                251 sound/soc/soc-ops.c 	unsigned int rshift = mc->rshift;
mc                252 sound/soc/soc-ops.c 	int max = mc->max;
mc                253 sound/soc/soc-ops.c 	int min = mc->min;
mc                254 sound/soc/soc-ops.c 	int sign_bit = mc->sign_bit;
mc                256 sound/soc/soc-ops.c 	unsigned int invert = mc->invert;
mc                272 sound/soc/soc-ops.c 	if (snd_soc_volsw_is_stereo(mc)) {
mc                306 sound/soc/soc-ops.c 	struct soc_mixer_control *mc =
mc                308 sound/soc/soc-ops.c 	unsigned int reg = mc->reg;
mc                309 sound/soc/soc-ops.c 	unsigned int reg2 = mc->rreg;
mc                310 sound/soc/soc-ops.c 	unsigned int shift = mc->shift;
mc                311 sound/soc/soc-ops.c 	unsigned int rshift = mc->rshift;
mc                312 sound/soc/soc-ops.c 	int max = mc->max;
mc                313 sound/soc/soc-ops.c 	int min = mc->min;
mc                314 sound/soc/soc-ops.c 	unsigned int sign_bit = mc->sign_bit;
mc                316 sound/soc/soc-ops.c 	unsigned int invert = mc->invert;
mc                330 sound/soc/soc-ops.c 	if (snd_soc_volsw_is_stereo(mc)) {
mc                368 sound/soc/soc-ops.c 	struct soc_mixer_control *mc =
mc                370 sound/soc/soc-ops.c 	unsigned int reg = mc->reg;
mc                371 sound/soc/soc-ops.c 	unsigned int reg2 = mc->rreg;
mc                372 sound/soc/soc-ops.c 	unsigned int shift = mc->shift;
mc                373 sound/soc/soc-ops.c 	unsigned int rshift = mc->rshift;
mc                374 sound/soc/soc-ops.c 	int max = mc->max;
mc                375 sound/soc/soc-ops.c 	int min = mc->min;
mc                386 sound/soc/soc-ops.c 	if (snd_soc_volsw_is_stereo(mc)) {
mc                412 sound/soc/soc-ops.c 	struct soc_mixer_control *mc =
mc                415 sound/soc/soc-ops.c 	unsigned int reg = mc->reg;
mc                416 sound/soc/soc-ops.c 	unsigned int reg2 = mc->rreg;
mc                417 sound/soc/soc-ops.c 	unsigned int shift = mc->shift;
mc                418 sound/soc/soc-ops.c 	unsigned int rshift = mc->rshift;
mc                419 sound/soc/soc-ops.c 	int max = mc->max;
mc                420 sound/soc/soc-ops.c 	int min = mc->min;
mc                433 sound/soc/soc-ops.c 	if (snd_soc_volsw_is_stereo(mc)) {
mc                458 sound/soc/soc-ops.c 	struct soc_mixer_control *mc =
mc                461 sound/soc/soc-ops.c 	int min = mc->min;
mc                463 sound/soc/soc-ops.c 	if (!mc->platform_max)
mc                464 sound/soc/soc-ops.c 		mc->platform_max = mc->max;
mc                465 sound/soc/soc-ops.c 	platform_max = mc->platform_max;
mc                468 sound/soc/soc-ops.c 	uinfo->count = snd_soc_volsw_is_stereo(mc) ? 2 : 1;
mc                488 sound/soc/soc-ops.c 	struct soc_mixer_control *mc =
mc                491 sound/soc/soc-ops.c 	unsigned int reg = mc->reg;
mc                492 sound/soc/soc-ops.c 	unsigned int rreg = mc->rreg;
mc                493 sound/soc/soc-ops.c 	unsigned int shift = mc->shift;
mc                494 sound/soc/soc-ops.c 	int min = mc->min;
mc                495 sound/soc/soc-ops.c 	int max = mc->max;
mc                497 sound/soc/soc-ops.c 	unsigned int invert = mc->invert;
mc                512 sound/soc/soc-ops.c 	if (snd_soc_volsw_is_stereo(mc)) {
mc                541 sound/soc/soc-ops.c 	struct soc_mixer_control *mc =
mc                543 sound/soc/soc-ops.c 	unsigned int reg = mc->reg;
mc                544 sound/soc/soc-ops.c 	unsigned int rreg = mc->rreg;
mc                545 sound/soc/soc-ops.c 	unsigned int shift = mc->shift;
mc                546 sound/soc/soc-ops.c 	int min = mc->min;
mc                547 sound/soc/soc-ops.c 	int max = mc->max;
mc                549 sound/soc/soc-ops.c 	unsigned int invert = mc->invert;
mc                565 sound/soc/soc-ops.c 	if (snd_soc_volsw_is_stereo(mc)) {
mc                597 sound/soc/soc-ops.c 	struct soc_mixer_control *mc;
mc                612 sound/soc/soc-ops.c 		mc = (struct soc_mixer_control *)kctl->private_value;
mc                613 sound/soc/soc-ops.c 		if (max <= mc->max) {
mc                614 sound/soc/soc-ops.c 			mc->platform_max = max;
mc                802 sound/soc/soc-ops.c 	struct soc_mreg_control *mc =
mc                806 sound/soc/soc-ops.c 	uinfo->value.integer.min = mc->min;
mc                807 sound/soc/soc-ops.c 	uinfo->value.integer.max = mc->max;
mc                830 sound/soc/soc-ops.c 	struct soc_mreg_control *mc =
mc                832 sound/soc/soc-ops.c 	unsigned int regbase = mc->regbase;
mc                833 sound/soc/soc-ops.c 	unsigned int regcount = mc->regcount;
mc                836 sound/soc/soc-ops.c 	unsigned int invert = mc->invert;
mc                837 sound/soc/soc-ops.c 	unsigned long mask = (1UL<<mc->nbits)-1;
mc                838 sound/soc/soc-ops.c 	long min = mc->min;
mc                839 sound/soc/soc-ops.c 	long max = mc->max;
mc                879 sound/soc/soc-ops.c 	struct soc_mreg_control *mc =
mc                881 sound/soc/soc-ops.c 	unsigned int regbase = mc->regbase;
mc                882 sound/soc/soc-ops.c 	unsigned int regcount = mc->regcount;
mc                885 sound/soc/soc-ops.c 	unsigned int invert = mc->invert;
mc                886 sound/soc/soc-ops.c 	unsigned long mask = (1UL<<mc->nbits)-1;
mc                887 sound/soc/soc-ops.c 	long max = mc->max;
mc                921 sound/soc/soc-ops.c 	struct soc_mixer_control *mc =
mc                923 sound/soc/soc-ops.c 	unsigned int reg = mc->reg;
mc                924 sound/soc/soc-ops.c 	unsigned int shift = mc->shift;
mc                926 sound/soc/soc-ops.c 	unsigned int invert = mc->invert != 0;
mc                958 sound/soc/soc-ops.c 	struct soc_mixer_control *mc =
mc                960 sound/soc/soc-ops.c 	unsigned int reg = mc->reg;
mc                961 sound/soc/soc-ops.c 	unsigned int shift = mc->shift;
mc                963 sound/soc/soc-ops.c 	unsigned int invert = mc->invert != 0;
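Across the soc-dapm.c and soc-ops.c entries above, the same decoding recurs: the control's private_value carries a struct soc_mixer_control with reg/rreg, shift/rshift, max, min and invert, and handlers extract a channel with (reg_val >> shift) & mask, optionally inverting against max. A self-contained simplified model of that extraction over a fake register file (the struct layout, register contents and exact min/invert ordering here are illustrative, not the kernel's exact computation):

#include <stdio.h>

/* simplified stand-in for struct soc_mixer_control */
struct mixer_ctl {
	unsigned int reg, rreg;
	unsigned int shift, rshift;
	int max, min;
	unsigned int invert;
};

static unsigned int regfile[4];			/* fake codec registers */

static int fls_u32(unsigned int x)		/* highest set bit, 1-based */
{
	int n = 0;

	while (x) {
		n++;
		x >>= 1;
	}
	return n;
}

static long ctl_read_one(const struct mixer_ctl *mc,
			 unsigned int reg, unsigned int shift)
{
	unsigned int mask = (1u << fls_u32(mc->max)) - 1;
	long val = (regfile[reg] >> shift) & mask;

	if (mc->invert)
		val = mc->max - val;		/* user value counts the other way */
	return val - mc->min;
}

static int ctl_is_stereo(const struct mixer_ctl *mc)
{
	/* same register and shift for both channels means mono */
	return mc->reg != mc->rreg || mc->shift != mc->rshift;
}

int main(void)
{
	struct mixer_ctl vol = {
		.reg = 1, .rreg = 1, .shift = 0, .rshift = 8,
		.max = 63, .min = 0, .invert = 0,
	};

	regfile[1] = (20 << 8) | 45;		/* right = 20, left = 45 */

	printf("left  = %ld\n", ctl_read_one(&vol, vol.reg, vol.shift));
	if (ctl_is_stereo(&vol))
		printf("right = %ld\n", ctl_read_one(&vol, vol.rreg, vol.rshift));
	return 0;
}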
mc                830 sound/soc/soc-topology.c 	struct snd_soc_tplg_mixer_control *mc;
mc                845 sound/soc/soc-topology.c 		mc = (struct snd_soc_tplg_mixer_control *)tplg->pos;
mc                848 sound/soc/soc-topology.c 		if (strnlen(mc->hdr.name, SNDRV_CTL_ELEM_ID_NAME_MAXLEN) ==
mc                856 sound/soc/soc-topology.c 			      le32_to_cpu(mc->priv.size));
mc                860 sound/soc/soc-topology.c 			mc->hdr.name, mc->hdr.access);
mc                863 sound/soc/soc-topology.c 		kc.name = mc->hdr.name;
mc                866 sound/soc/soc-topology.c 		kc.access = le32_to_cpu(mc->hdr.access);
mc                869 sound/soc/soc-topology.c 		sm->reg = tplc_chan_get_reg(tplg, mc->channel,
mc                871 sound/soc/soc-topology.c 		sm->rreg = tplc_chan_get_reg(tplg, mc->channel,
mc                873 sound/soc/soc-topology.c 		sm->shift = tplc_chan_get_shift(tplg, mc->channel,
mc                875 sound/soc/soc-topology.c 		sm->rshift = tplc_chan_get_shift(tplg, mc->channel,
mc                878 sound/soc/soc-topology.c 		sm->max = le32_to_cpu(mc->max);
mc                879 sound/soc/soc-topology.c 		sm->min = le32_to_cpu(mc->min);
mc                880 sound/soc/soc-topology.c 		sm->invert = le32_to_cpu(mc->invert);
mc                881 sound/soc/soc-topology.c 		sm->platform_max = le32_to_cpu(mc->platform_max);
mc                888 sound/soc/soc-topology.c 		err = soc_tplg_kcontrol_bind_io(&mc->hdr, &kc, tplg);
mc                890 sound/soc/soc-topology.c 			soc_control_err(tplg, &mc->hdr, mc->hdr.name);
mc                896 sound/soc/soc-topology.c 		err = soc_tplg_create_tlv(tplg, &kc, &mc->hdr);
mc                899 sound/soc/soc-topology.c 				mc->hdr.name);
mc                906 sound/soc/soc-topology.c 			(struct snd_soc_tplg_ctl_hdr *) mc);
mc                909 sound/soc/soc-topology.c 				mc->hdr.name);
mc                920 sound/soc/soc-topology.c 				mc->hdr.name);
mc               1314 sound/soc/soc-topology.c 	struct snd_soc_tplg_mixer_control *mc;
mc               1322 sound/soc/soc-topology.c 		mc = (struct snd_soc_tplg_mixer_control *)tplg->pos;
mc               1325 sound/soc/soc-topology.c 		if (strnlen(mc->hdr.name, SNDRV_CTL_ELEM_ID_NAME_MAXLEN) ==
mc               1334 sound/soc/soc-topology.c 			      le32_to_cpu(mc->priv.size));
mc               1337 sound/soc/soc-topology.c 			mc->hdr.name, i);
mc               1340 sound/soc/soc-topology.c 		kc[i].name = kstrdup(mc->hdr.name, GFP_KERNEL);
mc               1344 sound/soc/soc-topology.c 		kc[i].access = le32_to_cpu(mc->hdr.access);
mc               1347 sound/soc/soc-topology.c 		sm->reg = tplc_chan_get_reg(tplg, mc->channel,
mc               1349 sound/soc/soc-topology.c 		sm->rreg = tplc_chan_get_reg(tplg, mc->channel,
mc               1351 sound/soc/soc-topology.c 		sm->shift = tplc_chan_get_shift(tplg, mc->channel,
mc               1353 sound/soc/soc-topology.c 		sm->rshift = tplc_chan_get_shift(tplg, mc->channel,
mc               1356 sound/soc/soc-topology.c 		sm->max = le32_to_cpu(mc->max);
mc               1357 sound/soc/soc-topology.c 		sm->min = le32_to_cpu(mc->min);
mc               1358 sound/soc/soc-topology.c 		sm->invert = le32_to_cpu(mc->invert);
mc               1359 sound/soc/soc-topology.c 		sm->platform_max = le32_to_cpu(mc->platform_max);
mc               1364 sound/soc/soc-topology.c 		err = soc_tplg_kcontrol_bind_io(&mc->hdr, &kc[i], tplg);
mc               1366 sound/soc/soc-topology.c 			soc_control_err(tplg, &mc->hdr, mc->hdr.name);
mc               1371 sound/soc/soc-topology.c 		err = soc_tplg_create_tlv(tplg, &kc[i], &mc->hdr);
mc               1374 sound/soc/soc-topology.c 				mc->hdr.name);
mc               1381 sound/soc/soc-topology.c 			(struct snd_soc_tplg_ctl_hdr *)mc);
mc               1384 sound/soc/soc-topology.c 				mc->hdr.name);
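The soc-topology.c entries above read every numeric field of the firmware-provided mixer control through le32_to_cpu() before storing it, since the topology blob is little-endian on disk regardless of host byte order. A userspace sketch of the same pattern using le32toh() from <endian.h>, with a made-up on-disk record:

#define _DEFAULT_SOURCE
#include <endian.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* hypothetical on-disk record: all fields stored little-endian */
struct __attribute__((packed)) disk_mixer {
	uint32_t max;
	uint32_t min;
	uint32_t invert;
};

struct host_mixer {
	uint32_t max, min, invert;	/* native byte order */
};

static void parse_mixer(const void *blob, struct host_mixer *out)
{
	struct disk_mixer d;

	memcpy(&d, blob, sizeof(d));	/* avoid unaligned access on the blob */
	out->max = le32toh(d.max);
	out->min = le32toh(d.min);
	out->invert = le32toh(d.invert);
}

int main(void)
{
	/* max=255, min=0, invert=1, encoded little-endian */
	const uint8_t blob[12] = { 0xff, 0, 0, 0,  0, 0, 0, 0,  1, 0, 0, 0 };
	struct host_mixer m;

	parse_mixer(blob, &m);
	printf("max=%u min=%u invert=%u\n", m.max, m.min, m.invert);
	return 0;
}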
mc                445 sound/soc/sof/topology.c 	struct snd_soc_tplg_mixer_control *mc =
mc                453 sound/soc/sof/topology.c 	if (le32_to_cpu(mc->num_channels) > SND_SOC_TPLG_MAX_CHAN)
mc                458 sound/soc/sof/topology.c 				     le32_to_cpu(mc->num_channels));
mc                464 sound/soc/sof/topology.c 	scontrol->min_volume_step = le32_to_cpu(mc->min);
mc                465 sound/soc/sof/topology.c 	scontrol->max_volume_step = le32_to_cpu(mc->max);
mc                466 sound/soc/sof/topology.c 	scontrol->num_channels = le32_to_cpu(mc->num_channels);
mc                469 sound/soc/sof/topology.c 	if (le32_to_cpu(mc->max) == 1) {
mc                483 sound/soc/sof/topology.c 	ret = set_up_volume_table(scontrol, tlv, le32_to_cpu(mc->max) + 1);
mc                387 sound/soc/ti/omap-mcbsp-st.c 	struct soc_mixer_control *mc =
mc                389 sound/soc/ti/omap-mcbsp-st.c 	int max = mc->max;
mc                390 sound/soc/ti/omap-mcbsp-st.c 	int min = mc->min;
mc                406 sound/soc/ti/omap-mcbsp-st.c 	struct soc_mixer_control *mc =					\
mc                408 sound/soc/ti/omap-mcbsp-st.c 	int max = mc->max;						\
mc                409 sound/soc/ti/omap-mcbsp-st.c 	int min = mc->min;						\
mc                 44 tools/testing/selftests/powerpc/math/fpu_signal.c 	mcontext_t *mc = &uc->uc_mcontext;
mc                 48 tools/testing/selftests/powerpc/math/fpu_signal.c 		if (mc->fp_regs[i] != darray[i - 14]) {
mc                 47 tools/testing/selftests/powerpc/math/vmx_signal.c 	mcontext_t *mc = &uc->uc_mcontext;
mc                 51 tools/testing/selftests/powerpc/math/vmx_signal.c 		if (memcmp(mc->v_regs->vrregs[i], &varray[i - 20], 16)) {
mc                 62 tools/testing/selftests/powerpc/math/vmx_signal.c 				printf("%d  | 0x%04x%04x%04x%04x      | 0x%04x%04x%04x%04x\n", j, mc->v_regs->vrregs[j][0],
mc                 63 tools/testing/selftests/powerpc/math/vmx_signal.c 					   mc->v_regs->vrregs[j][1], mc->v_regs->vrregs[j][2], mc->v_regs->vrregs[j][3],
mc                145 virt/kvm/arm/mmu.c static void mmu_free_memory_cache(struct kvm_mmu_memory_cache *mc)
mc                147 virt/kvm/arm/mmu.c 	while (mc->nobjs)
mc                148 virt/kvm/arm/mmu.c 		free_page((unsigned long)mc->objects[--mc->nobjs]);
mc                151 virt/kvm/arm/mmu.c static void *mmu_memory_cache_alloc(struct kvm_mmu_memory_cache *mc)
mc                155 virt/kvm/arm/mmu.c 	BUG_ON(!mc || !mc->nobjs);
mc                156 virt/kvm/arm/mmu.c 	p = mc->objects[--mc->nobjs];
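The mmu.c excerpts here (and the MIPS copy earlier in this index) show the kvm_mmu_memory_cache pattern: pages are preallocated into a small array, the alloc path simply pops the last entry and insists the cache is never empty, and the free path releases whatever is left. A minimal userspace analogue with malloc/free and hypothetical names; the topup helper is an assumption about how such a cache is refilled, since that side is not in the excerpt:

#include <assert.h>
#include <stdio.h>
#include <stdlib.h>

#define CACHE_CAPACITY 8

struct obj_cache {
	int nobjs;
	void *objects[CACHE_CAPACITY];
};

/* refill the cache up to "min" objects before entering a no-fail section */
static int cache_topup(struct obj_cache *c, int min)
{
	while (c->nobjs < min) {
		void *p = malloc(4096);

		if (!p)
			return -1;
		c->objects[c->nobjs++] = p;
	}
	return 0;
}

/* pop a preallocated object; must not be called on an empty cache */
static void *cache_alloc(struct obj_cache *c)
{
	assert(c->nobjs > 0);
	return c->objects[--c->nobjs];
}

/* release anything still cached */
static void cache_free(struct obj_cache *c)
{
	while (c->nobjs)
		free(c->objects[--c->nobjs]);
}

int main(void)
{
	struct obj_cache c = { 0 };
	void *a, *b;

	if (cache_topup(&c, 4))
		return 1;
	a = cache_alloc(&c);
	b = cache_alloc(&c);
	printf("got %p and %p, %d left\n", a, b, c.nobjs);
	free(a);
	free(b);
	cache_free(&c);
	return 0;
}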