ro                115 arch/s390/mm/pageattr.c 	int i, ro, nx;
ro                121 arch/s390/mm/pageattr.c 	ro = !!(pmd_val(*pmdp) & _SEGMENT_ENTRY_PROTECT);
ro                123 arch/s390/mm/pageattr.c 	prot = pgprot_val(ro ? PAGE_KERNEL_RO : PAGE_KERNEL);
ro                192 arch/s390/mm/pageattr.c 	int i, ro, nx;
ro                198 arch/s390/mm/pageattr.c 	ro = !!(pud_val(*pudp) & _REGION_ENTRY_PROTECT);
ro                200 arch/s390/mm/pageattr.c 	prot = pgprot_val(ro ? SEGMENT_KERNEL_RO : SEGMENT_KERNEL);
ro               1187 block/genhd.c  static DEVICE_ATTR(ro, 0444, disk_ro_show, NULL);
ro               1560 block/genhd.c  static void set_disk_ro_uevent(struct gendisk *gd, int ro)
ro               1565 block/genhd.c  	if (!ro)
ro                186 block/partition-generic.c static DEVICE_ATTR(ro, 0444, part_ro_show, NULL);
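
The block-layer lines above expose the read-only flag as a mode-0444 sysfs attribute with no store method. A minimal sketch of that pattern for a hypothetical driver (struct foo_device and its read_only field are assumptions, not part of genhd.c):

	#include <linux/device.h>
	#include <linux/sysfs.h>

	/* Hypothetical per-device state; the real code reads the gendisk policy flag. */
	struct foo_device {
		bool read_only;
	};

	static ssize_t foo_ro_show(struct device *dev,
				   struct device_attribute *attr, char *buf)
	{
		struct foo_device *fdev = dev_get_drvdata(dev);

		return sprintf(buf, "%d\n", fdev->read_only);
	}

	/* Mode 0444 plus a NULL store callback make the attribute itself read-only. */
	static DEVICE_ATTR(ro, 0444, foo_ro_show, NULL);
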
ro                693 drivers/block/rbd.c 	int ro;
ro                695 drivers/block/rbd.c 	if (get_user(ro, (int __user *)arg))
ro                699 drivers/block/rbd.c 	if (rbd_dev->spec->snap_id != CEPH_NOSNAP && !ro)
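
rbd's handler above copies the new flag from user space and refuses to make a mapped snapshot writable. A condensed sketch of that check, assuming a hypothetical foo_device with is_snapshot and read_only fields:

	#include <linux/errno.h>
	#include <linux/types.h>
	#include <linux/uaccess.h>

	struct foo_device {
		bool is_snapshot;
		bool read_only;
	};

	static int foo_set_ro(struct foo_device *dev, unsigned long arg)
	{
		int ro;

		if (get_user(ro, (int __user *)arg))
			return -EFAULT;

		/* A snapshot can never be mapped read-write. */
		if (dev->is_snapshot && !ro)
			return -EROFS;

		dev->read_only = !!ro;
		return 0;
	}
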
ro                806 drivers/block/xen-blkback/blkback.c 			 int num, bool ro)
ro                850 drivers/block/xen-blkback/blkback.c 			if (!use_persistent_gnts && ro)
ro               1668 drivers/gpu/drm/amd/powerplay/smumgr/fiji_smumgr.c 	uint32_t ro, efuse, efuse2, clock_freq, volt_without_cks,
ro               1693 drivers/gpu/drm/amd/powerplay/smumgr/fiji_smumgr.c 		ro = (2300 - 1350) * efuse / 255 + 1350;
ro               1695 drivers/gpu/drm/amd/powerplay/smumgr/fiji_smumgr.c 		ro = (2500 - 1000) * efuse / 255 + 1000;
ro               1697 drivers/gpu/drm/amd/powerplay/smumgr/fiji_smumgr.c 	if (ro >= 1660)
ro               1710 drivers/gpu/drm/amd/powerplay/smumgr/fiji_smumgr.c 			(sclk_table->entries[i].clk/100) / 10000 + 3571 + 75 - ro) * 1000 /
ro               1713 drivers/gpu/drm/amd/powerplay/smumgr/fiji_smumgr.c 			(sclk_table->entries[i].clk/100) / 10000 + 3320 + 45 - ro) * 1000 /
ro               1513 drivers/gpu/drm/amd/powerplay/smumgr/polaris10_smumgr.c 	uint32_t ro, efuse, volt_without_cks, volt_with_cks, value, max, min;
ro               1553 drivers/gpu/drm/amd/powerplay/smumgr/polaris10_smumgr.c 	ro = efuse * (max - min) / 255 + min;
ro               1560 drivers/gpu/drm/amd/powerplay/smumgr/polaris10_smumgr.c 			volt_without_cks = (uint32_t)((2753594000U + (sclk_table->entries[i].clk/100) * 136418 - (ro - 70) * 1000000) / \
ro               1562 drivers/gpu/drm/amd/powerplay/smumgr/polaris10_smumgr.c 			volt_with_cks = (uint32_t)((2797202000U + sclk_table->entries[i].clk/100 * 3232 - (ro - 65) * 1000000) / \
ro               1565 drivers/gpu/drm/amd/powerplay/smumgr/polaris10_smumgr.c 			volt_without_cks = (uint32_t)((2416794800U + (sclk_table->entries[i].clk/100) * 1476925/10 - (ro - 50) * 1000000) / \
ro               1567 drivers/gpu/drm/amd/powerplay/smumgr/polaris10_smumgr.c 			volt_with_cks = (uint32_t)((2999656000U - sclk_table->entries[i].clk/100 * 392803 - (ro - 44) * 1000000) / \
ro               1575 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 	uint32_t ro, efuse, efuse2, clock_freq, volt_without_cks,
ro               1606 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 		ro = (2300 - 1350) * efuse / 255 + 1350;
ro               1608 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 		ro = (2500 - 1000) * efuse / 255 + 1000;
ro               1610 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 	if (ro >= 1660)
ro               1624 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 			volt_without_cks = (uint32_t)((7732 + 60 - ro - 20838 *
ro               1627 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 			volt_with_cks = (uint32_t)((5250 + 51 - ro - 2404 *
ro               1632 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 				(sclk_table->entries[i].clk/100) / 10000 + 3571 + 75 - ro) * 1000 /
ro               1635 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 				(sclk_table->entries[i].clk/100) / 10000 + 3320 + 45 - ro) * 1000 /
ro               1492 drivers/gpu/drm/amd/powerplay/smumgr/vegam_smumgr.c 	uint32_t ro, efuse, volt_without_cks, volt_with_cks, value, max, min;
ro               1511 drivers/gpu/drm/amd/powerplay/smumgr/vegam_smumgr.c 	ro = efuse * (max - min) / 255 + min;
ro               1518 drivers/gpu/drm/amd/powerplay/smumgr/vegam_smumgr.c 				136418 - (ro - 70) * 1000000) /
ro               1521 drivers/gpu/drm/amd/powerplay/smumgr/vegam_smumgr.c 				3232 - (ro - 65) * 1000000) /
ro                510 drivers/gpu/drm/drm_crtc_helper.c 	int count = 0, ro, fail = 0;
ro                612 drivers/gpu/drm/drm_crtc_helper.c 	for (ro = 0; ro < set->num_connectors; ro++) {
ro                613 drivers/gpu/drm/drm_crtc_helper.c 		if (set->connectors[ro]->encoder)
ro                615 drivers/gpu/drm/drm_crtc_helper.c 		drm_connector_get(set->connectors[ro]);
ro                625 drivers/gpu/drm/drm_crtc_helper.c 		for (ro = 0; ro < set->num_connectors; ro++) {
ro                626 drivers/gpu/drm/drm_crtc_helper.c 			if (set->connectors[ro] == connector) {
ro                672 drivers/gpu/drm/drm_crtc_helper.c 		for (ro = 0; ro < set->num_connectors; ro++) {
ro                673 drivers/gpu/drm/drm_crtc_helper.c 			if (set->connectors[ro] == connector)
ro                761 drivers/gpu/drm/drm_crtc_helper.c 	for (ro = 0; ro < set->num_connectors; ro++) {
ro                762 drivers/gpu/drm/drm_crtc_helper.c 		if (set->connectors[ro]->encoder)
ro                764 drivers/gpu/drm/drm_crtc_helper.c 		drm_connector_put(set->connectors[ro]);
ro                679 drivers/gpu/drm/etnaviv/etnaviv_gem.c 					  !userptr->ro ? FOLL_WRITE : 0, pages);
ro                738 drivers/gpu/drm/etnaviv/etnaviv_gem.c 	etnaviv_obj->userptr.ro = !(flags & ETNA_USERPTR_WRITE);
ro                 20 drivers/gpu/drm/etnaviv/etnaviv_gem.h 	bool ro;
ro                 21 drivers/gpu/drm/nouveau/include/nvif/if500b.h 	__u8  ro;
ro                 16 drivers/gpu/drm/nouveau/include/nvif/if500d.h 	__u8  ro;
ro                 20 drivers/gpu/drm/nouveau/include/nvif/if900b.h 	__u8  ro;
ro                 17 drivers/gpu/drm/nouveau/include/nvif/if900d.h 	__u8  ro;
ro                 23 drivers/gpu/drm/nouveau/include/nvif/ifb00d.h 	__u8  ro;
ro                 23 drivers/gpu/drm/nouveau/include/nvif/ifc00d.h 	__u8  ro;
ro               1484 drivers/gpu/drm/nouveau/nouveau_bo.c 				args.nv50.ro = 0;
ro               1491 drivers/gpu/drm/nouveau/nouveau_bo.c 				args.gf100.ro = 0;
ro                 52 drivers/gpu/drm/nouveau/nouveau_mem.c 		args.nv50.ro = 0;
ro                 66 drivers/gpu/drm/nouveau/nouveau_mem.c 		args.gf100.ro = 0;
ro                 47 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/memgf100.c 		uvmm.ro   = args->v0.ro;
ro                 48 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/memnv50.c 		uvmm.ro   = args->v0.ro;
ro                250 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 	u8  kind, priv, ro, vol;
ro                259 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 		ro   = !!args->v0.ro;
ro                265 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 		ro   = 0;
ro                315 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 	map->type |= (u64)  ro << 2;
ro                323 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	u8  kind, priv, ro, vol;
ro                332 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 		ro   = !!args->v0.ro;
ro                338 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 		ro   = 0;
ro                385 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	map->type |= (u64)  ro << 6;
ro                238 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 	u8  aper, kind, comp, priv, ro;
ro                246 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 		ro   = !!args->v0.ro;
ro                252 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 		ro   = 0;
ro                317 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 	map->type |= (u64)ro << 3;
ro                370 drivers/gpu/ipu-v3/ipu-cpmem.c 	int bpp = 0, npb = 0, ro, go, bo, to;
ro                372 drivers/gpu/ipu-v3/ipu-cpmem.c 	ro = rgb->bits_per_pixel - rgb->red.length - rgb->red.offset;
ro                378 drivers/gpu/ipu-v3/ipu-cpmem.c 	ipu_ch_param_write_field(ch, IPU_FIELD_OFS0, ro);
ro                909 drivers/hwmon/ibmaem.c 				const struct aem_ro_sensor_template *ro,
ro                917 drivers/hwmon/ibmaem.c 	while (ro->label) {
ro                919 drivers/hwmon/ibmaem.c 		sensors->dev_attr.attr.name = ro->label;
ro                921 drivers/hwmon/ibmaem.c 		sensors->dev_attr.show = ro->show;
ro                922 drivers/hwmon/ibmaem.c 		sensors->index = ro->index;
ro                930 drivers/hwmon/ibmaem.c 		ro++;
ro               3220 drivers/md/dm-raid.c 	rs->md.ro = 1;
ro               3359 drivers/md/dm-raid.c 	     (!mddev->ro && test_bit(MD_RECOVERY_NEEDED, &recovery)))) {
ro               3705 drivers/md/dm-raid.c 	if (mddev->ro == 2) {
ro               3709 drivers/md/dm-raid.c 		mddev->ro = 0;
ro               3873 drivers/md/dm-raid.c 	int ro = mddev->ro;
ro               3876 drivers/md/dm-raid.c 	mddev->ro = 0;
ro               3878 drivers/md/dm-raid.c 	mddev->ro = ro;
ro               4011 drivers/md/dm-raid.c 		mddev->ro = 0;
ro                390 drivers/md/md.c 	if (mddev->ro == 1 && unlikely(rw == WRITE)) {
ro               2555 drivers/md/md.c 	if (mddev->ro) {
ro               3815 drivers/md/md.c 	if (mddev->ro)
ro               4019 drivers/md/md.c 		else if (mddev->ro)
ro               4120 drivers/md/md.c 		else if (mddev->ro)
ro               4243 drivers/md/md.c 		switch(mddev->ro) {
ro               4276 drivers/md/md.c static int do_md_stop(struct mddev *mddev, int ro, struct block_device *bdev);
ro               4287 drivers/md/md.c 	if (mddev->pers && (st == active || st == clean) && mddev->ro != 1) {
ro               4331 drivers/md/md.c 			mddev->ro = 1;
ro               4338 drivers/md/md.c 			if (mddev->ro == 0)
ro               4340 drivers/md/md.c 			else if (mddev->ro == 1)
ro               4343 drivers/md/md.c 				mddev->ro = 2;
ro               4347 drivers/md/md.c 			mddev->ro = 2;
ro               4372 drivers/md/md.c 			mddev->ro = 0;
ro               4658 drivers/md/md.c 	    (!mddev->ro && test_bit(MD_RECOVERY_NEEDED, &recovery))) {
ro               4730 drivers/md/md.c 	if (mddev->ro == 2) {
ro               4734 drivers/md/md.c 		mddev->ro = 0;
ro               4963 drivers/md/md.c 		    mddev->ro == 0 &&
ro               5634 drivers/md/md.c 		if (mddev->ro != 1 &&
ro               5637 drivers/md/md.c 			mddev->ro = 1;
ro               5740 drivers/md/md.c 	if (start_readonly && mddev->ro == 0)
ro               5741 drivers/md/md.c 		mddev->ro = 2; /* read-only, but switch on first write */
ro               5815 drivers/md/md.c 	} else if (mddev->ro == 2) /* auto-readonly not meaningful */
ro               5816 drivers/md/md.c 		mddev->ro = 0;
ro               5834 drivers/md/md.c 	if (mddev->degraded && !mddev->ro)
ro               5924 drivers/md/md.c 	if (!mddev->ro)
ro               5943 drivers/md/md.c 	mddev->ro = 0;
ro               5970 drivers/md/md.c 	mddev->ro = 0;
ro               6020 drivers/md/md.c 	if (mddev->ro == 0 &&
ro               6125 drivers/md/md.c 		if (mddev->ro==1)
ro               6127 drivers/md/md.c 		mddev->ro = 1;
ro               6184 drivers/md/md.c 		if (mddev->ro)
ro               6203 drivers/md/md.c 		if (mddev->ro)
ro               6204 drivers/md/md.c 			mddev->ro = 0;
ro               7017 drivers/md/md.c 	if (mddev->ro)
ro               7047 drivers/md/md.c 	if (mddev->ro)
ro               7282 drivers/md/md.c 	int ro;
ro               7472 drivers/md/md.c 		if (get_user(ro, (int __user *)(arg))) {
ro               7481 drivers/md/md.c 		if (ro)
ro               7485 drivers/md/md.c 		if (mddev->ro != 1)
ro               7494 drivers/md/md.c 				mddev->ro = 2;
ro               7505 drivers/md/md.c 	if (mddev->ro && mddev->pers) {
ro               7506 drivers/md/md.c 		if (mddev->ro == 2) {
ro               7507 drivers/md/md.c 			mddev->ro = 0;
ro               8029 drivers/md/md.c 			if (mddev->ro==1)
ro               8031 drivers/md/md.c 			if (mddev->ro==2)
ro               8288 drivers/md/md.c 	BUG_ON(mddev->ro == 1);
ro               8289 drivers/md/md.c 	if (mddev->ro == 2) {
ro               8291 drivers/md/md.c 		mddev->ro = 0;
ro               8342 drivers/md/md.c 	WARN_ON_ONCE(mddev->in_sync || mddev->ro);
ro               8374 drivers/md/md.c 	if (mddev->ro)
ro               8423 drivers/md/md.c 	if (mddev->ro) {/* never try to sync a read-only array */
ro               8892 drivers/md/md.c 			if (mddev->ro &&
ro               8993 drivers/md/md.c 	if (mddev->ro && !test_bit(MD_RECOVERY_NEEDED, &mddev->recovery))
ro               9012 drivers/md/md.c 		if (mddev->ro) {
ro                285 drivers/md/md.h 	int				ro;
ro               2500 drivers/md/raid1.c 	if (mddev->ro == 0
ro               2506 drivers/md/raid1.c 	} else if (mddev->ro == 0 && test_bit(FailFast, &rdev->flags)) {
ro               2595 drivers/md/raid10.c 	if (mddev->ro)
ro               4932 drivers/md/raid5.c 	if (s.failed <= conf->max_degraded && !conf->mddev->ro)
ro               7259 drivers/md/raid5.c 			else if (mddev->ro == 0) {
ro               7303 drivers/md/raid5.c 			mddev->ro = 1;
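
Most of the md lines above revolve around the three values of the mddev->ro field declared at md.h line 285: 0 is read-write, 1 is hard read-only, and 2 is auto-readonly, i.e. read-only until the first write arrives (see the comment at md.c line 5741 and the flip at lines 8289-8291). A minimal sketch of that promotion logic, not the actual md code; the macro names are assumptions:

	#include <linux/types.h>

	#define FOO_RDWR   0	/* normal read-write array */
	#define FOO_RDONLY 1	/* hard read-only: writes are rejected (md.c line 390) */
	#define FOO_AUTORO 2	/* read-only until the first write is attempted */

	static bool foo_may_write(int *ro)
	{
		if (*ro == FOO_RDONLY)
			return false;		/* reject the write outright */
		if (*ro == FOO_AUTORO)
			*ro = FOO_RDWR;		/* first write promotes to read-write */
		return true;
	}
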
ro                436 drivers/media/dvb-frontends/stv0900_sw.c static u32 stv0900_carrier_width(u32 srate, enum fe_stv0900_rolloff ro)
ro                440 drivers/media/dvb-frontends/stv0900_sw.c 	switch (ro) {
ro                937 drivers/media/dvb-frontends/stv090x.c 	u32 ro;
ro                941 drivers/media/dvb-frontends/stv090x.c 		ro = 20;
ro                944 drivers/media/dvb-frontends/stv090x.c 		ro = 25;
ro                948 drivers/media/dvb-frontends/stv090x.c 		ro = 35;
ro                952 drivers/media/dvb-frontends/stv090x.c 	return srate + (srate * ro) / 100;
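
Both demodulator drivers above widen the symbol rate by the roll-off factor: occupied bandwidth = srate * (1 + ro/100), with ro being 20, 25 or 35 for roll-off 0.20/0.25/0.35 (see the switch in stv0900_carrier_width() and the return at stv090x.c line 952). A worked example using illustrative numbers, not values from either driver:

	#include <stdint.h>
	#include <stdio.h>

	/* Same arithmetic as the carrier-width helpers above. */
	static uint32_t carrier_width(uint32_t srate, uint32_t ro_percent)
	{
		return srate + (srate * ro_percent) / 100;
	}

	int main(void)
	{
		/* 27.5 MS/s at roll-off 0.35: 27,500,000 * 1.35 = 37,125,000 Hz */
		printf("%u\n", carrier_width(27500000, 35));
		return 0;
	}
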
ro                301 drivers/mmc/core/mmc.c 			 unsigned int part_cfg, char *name, int idx, bool ro,
ro                307 drivers/mmc/core/mmc.c 	card->part[card->nr_parts].force_ro = ro;
ro                829 drivers/mmc/core/sd.c 	int ro;
ro                842 drivers/mmc/core/sd.c 	ro = host->ops->get_ro(host);
ro                844 drivers/mmc/core/sd.c 	return ro;
ro                898 drivers/mmc/core/sd.c 		int ro = mmc_sd_get_ro(host);
ro                900 drivers/mmc/core/sd.c 		if (ro < 0) {
ro                903 drivers/mmc/core/sd.c 		} else if (ro > 0) {
ro               1089 drivers/mmc/host/rtsx_pci_sdmmc.c 	int ro = 0;
ro               1103 drivers/mmc/host/rtsx_pci_sdmmc.c 		ro = 1;
ro               1107 drivers/mmc/host/rtsx_pci_sdmmc.c 	return ro;
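
mmc_sd_get_ro() above returns a tri-state: negative when the host cannot sense the write-protect switch, 0 when the card is writable, positive when it is protected (the rtsx host below it simply reports 0 or 1). A hedged sketch of how a caller treats the three cases, mirroring sd.c lines 898-903; the helper and the read_only pointer are assumptions:

	#include <linux/errno.h>
	#include <linux/mmc/host.h>

	static void foo_apply_ro(struct mmc_host *host, bool *read_only)
	{
		int ro = host->ops->get_ro ? host->ops->get_ro(host) : -EOPNOTSUPP;

		if (ro < 0)
			*read_only = false;	/* switch state unknown: assume writable */
		else
			*read_only = ro > 0;	/* positive means physically write-protected */
	}
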
ro                505 drivers/mtd/nand/raw/atmel/pmecc.c 	int ro;
ro                573 drivers/mtd/nand/raw/atmel/pmecc.c 			ro = 0;
ro                579 drivers/mtd/nand/raw/atmel/pmecc.c 					ro = j;
ro                584 drivers/mtd/nand/raw/atmel/pmecc.c 			diff = (mu[i] - mu[ro]);
ro                587 drivers/mtd/nand/raw/atmel/pmecc.c 			if ((lmu[i] >> 1) > ((lmu[ro] >> 1) + diff))
ro                590 drivers/mtd/nand/raw/atmel/pmecc.c 				lmu[i + 1] = ((lmu[ro] >> 1) + diff) * 2;
ro                597 drivers/mtd/nand/raw/atmel/pmecc.c 			for (k = 0; k <= lmu[ro] >> 1; k++) {
ro                600 drivers/mtd/nand/raw/atmel/pmecc.c 				if (!(smu[ro * num + k] && dmu[i]))
ro                604 drivers/mtd/nand/raw/atmel/pmecc.c 				b = index_of[dmu[ro]];
ro                605 drivers/mtd/nand/raw/atmel/pmecc.c 				c = index_of[smu[ro * num + k]];
ro                437 drivers/net/ethernet/chelsio/cxgb/pm3393.c 	if (ro & \
ro                445 drivers/net/ethernet/chelsio/cxgb/pm3393.c 	u64	ro;
ro                457 drivers/net/ethernet/chelsio/cxgb/pm3393.c 	ro = ((u64)val0 & 0xffff) | (((u64)val1 & 0xffff) << 16) |
ro               1619 drivers/nvdimm/btt.c 	if (btt->init_state != INIT_READY && nd_region->ro) {
ro                636 drivers/nvdimm/bus.c 	if (disk_ro || nd_region->ro == disk_ro)
ro               1171 drivers/nvdimm/bus.c 	int rc, ro;
ro               1173 drivers/nvdimm/bus.c 	ro = ((file->f_flags & O_ACCMODE) == O_RDONLY);
ro               1200 drivers/nvdimm/bus.c 	rc = __nd_ioctl(nvdimm_bus, nvdimm, ro, cmd, arg);
ro                152 drivers/nvdimm/nd.h 	int id, num_lanes, ro, numa_node, target_node;
ro                739 drivers/nvdimm/pfn_devs.c 	if (nd_region->ro) {
ro                544 drivers/nvdimm/region_devs.c 	return sprintf(buf, "%d\n", nd_region->ro);
ro                550 drivers/nvdimm/region_devs.c 	bool ro;
ro                551 drivers/nvdimm/region_devs.c 	int rc = strtobool(buf, &ro);
ro                557 drivers/nvdimm/region_devs.c 	nd_region->ro = ro;
ro                941 drivers/nvdimm/region_devs.c 	int ro = 0;
ro                955 drivers/nvdimm/region_devs.c 			ro = 1;
ro               1022 drivers/nvdimm/region_devs.c 	nd_region->ro = ro;
ro                 29 drivers/pci/pci-bridge-emul.c 	u32 ro;
ro                 42 drivers/pci/pci-bridge-emul.c 	[PCI_VENDOR_ID / 4] = { .ro = ~0 },
ro                 47 drivers/pci/pci-bridge-emul.c 		.ro = ((PCI_COMMAND_SPECIAL | PCI_COMMAND_INVALIDATE |
ro                 60 drivers/pci/pci-bridge-emul.c 	[PCI_CLASS_REVISION / 4] = { .ro = ~0 },
ro                 78 drivers/pci/pci-bridge-emul.c 	[PCI_CACHE_LINE_SIZE / 4] = { .ro = ~0 },
ro                 84 drivers/pci/pci-bridge-emul.c 	[PCI_BASE_ADDRESS_0 / 4] = { .ro = ~0 },
ro                 85 drivers/pci/pci-bridge-emul.c 	[PCI_BASE_ADDRESS_1 / 4] = { .ro = ~0 },
ro                 91 drivers/pci/pci-bridge-emul.c 		.ro = GENMASK(31, 24),
ro                 99 drivers/pci/pci-bridge-emul.c 		.ro = (((PCI_STATUS_66MHZ | PCI_STATUS_FAST_BACK |
ro                118 drivers/pci/pci-bridge-emul.c 		.ro = GENMASK(19, 16) | GENMASK(3, 0),
ro                126 drivers/pci/pci-bridge-emul.c 		.ro = GENMASK(19, 16) | GENMASK(3, 0),
ro                142 drivers/pci/pci-bridge-emul.c 		.ro = GENMASK(7, 0),
ro                168 drivers/pci/pci-bridge-emul.c 		.ro = (GENMASK(15, 8) | ((PCI_BRIDGE_CTL_FAST_BACK) << 16)),
ro                182 drivers/pci/pci-bridge-emul.c 		.ro = ~0,
ro                186 drivers/pci/pci-bridge-emul.c 		.ro = ~0,
ro                198 drivers/pci/pci-bridge-emul.c 		.ro = GENMASK(20, 19),
ro                204 drivers/pci/pci-bridge-emul.c 		.ro = lower_32_bits(~BIT(23)),
ro                216 drivers/pci/pci-bridge-emul.c 		.ro = GENMASK(13, 0) << 16,
ro                222 drivers/pci/pci-bridge-emul.c 		.ro = ~0,
ro                237 drivers/pci/pci-bridge-emul.c 		.ro = (PCI_EXP_SLTSTA_MRLSS | PCI_EXP_SLTSTA_PDS |
ro                252 drivers/pci/pci-bridge-emul.c 		.ro = PCI_EXP_RTCAP_CRSVIS << 16,
ro                257 drivers/pci/pci-bridge-emul.c 		.ro = GENMASK(15, 0) | PCI_EXP_RTSTA_PENDING,
ro                300 drivers/pci/pci-bridge-emul.c 		bridge->pci_regs_behavior[PCI_PREF_MEMORY_BASE / 4].ro = ~0;
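
In pci-bridge-emul.c each emulated config register carries an .ro bitmask naming the bits a config write must not change. A simplified sketch of applying such a mask on write; the real helper also honours separate read-write and write-1-to-clear masks:

	#include <linux/types.h>

	/* Keep the old value in the read-only bits, take the new value elsewhere. */
	static u32 foo_emul_apply_write(u32 old, u32 new, u32 ro_mask)
	{
		return (old & ro_mask) | (new & ~ro_mask);
	}
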
ro                175 drivers/regulator/tps6586x-regulator.c 	TPS6586X_REGULATOR(_id, ro, _pname, vdata, vreg, shift, nbits,	\
ro                666 drivers/scsi/aacraid/aachba.c 			fsa_dev_ptr->ro = ((le32_to_cpu(dresp->mnt[0].state) & FSCS_READONLY) != 0);
ro               1282 drivers/scsi/aacraid/aacraid.h 	u8		ro;
ro                 79 drivers/scsi/bfa/bfa_fc.h 	u32        ro;		/* relative offset */
ro                759 drivers/usb/gadget/function/f_mass_storage.c 	if (curlun->ro) {
ro               1243 drivers/usb/gadget/function/f_mass_storage.c 		buf[2] = (curlun->ro ? 0x80 : 0x00);		/* WP, DPOFUA */
ro               1247 drivers/usb/gadget/function/f_mass_storage.c 		buf[3] = (curlun->ro ? 0x80 : 0x00);		/* WP, DPOFUA */
ro               2557 drivers/usb/gadget/function/f_mass_storage.c static DEVICE_ATTR(ro, 0, ro_show, ro_store);
ro               2764 drivers/usb/gadget/function/f_mass_storage.c 	lun->ro = cfg->cdrom || cfg->ro;
ro               2765 drivers/usb/gadget/function/f_mass_storage.c 	lun->initially_ro = lun->ro;
ro               2807 drivers/usb/gadget/function/f_mass_storage.c 	      lun->ro ? "read only " : "",
ro               3065 drivers/usb/gadget/function/f_mass_storage.c CONFIGFS_ATTR(fsg_lun_opts_, ro);
ro               3456 drivers/usb/gadget/function/f_mass_storage.c 		lun->ro = !!params->ro[i];
ro                 10 drivers/usb/gadget/function/f_mass_storage.h 	bool		ro[FSG_MAX_LUNS];
ro                 35 drivers/usb/gadget/function/f_mass_storage.h 	_FSG_MODULE_PARAM_ARRAY(prefix, params, ro, bool,		\
ro                 89 drivers/usb/gadget/function/f_mass_storage.h 	char ro;
ro                180 drivers/usb/gadget/function/storage_common.c 	int				ro;
ro                191 drivers/usb/gadget/function/storage_common.c 	ro = curlun->initially_ro;
ro                192 drivers/usb/gadget/function/storage_common.c 	if (!ro) {
ro                195 drivers/usb/gadget/function/storage_common.c 			ro = 1;
ro                197 drivers/usb/gadget/function/storage_common.c 	if (ro)
ro                205 drivers/usb/gadget/function/storage_common.c 		ro = 1;
ro                222 drivers/usb/gadget/function/storage_common.c 		ro = 1;
ro                264 drivers/usb/gadget/function/storage_common.c 	curlun->ro = ro;
ro                288 drivers/usb/gadget/function/storage_common.c 	if (curlun->ro || !filp)
ro                319 drivers/usb/gadget/function/storage_common.c 				  ? curlun->ro
ro                377 drivers/usb/gadget/function/storage_common.c static ssize_t _fsg_store_ro(struct fsg_lun *curlun, bool ro)
ro                384 drivers/usb/gadget/function/storage_common.c 	curlun->ro = ro;
ro                385 drivers/usb/gadget/function/storage_common.c 	curlun->initially_ro = ro;
ro                386 drivers/usb/gadget/function/storage_common.c 	LDBG(curlun, "read-only status set to %d\n", curlun->ro);
ro                395 drivers/usb/gadget/function/storage_common.c 	bool		ro;
ro                397 drivers/usb/gadget/function/storage_common.c 	rc = strtobool(buf, &ro);
ro                406 drivers/usb/gadget/function/storage_common.c 	rc = _fsg_store_ro(curlun, ro);
ro                104 drivers/usb/gadget/function/storage_common.h 	unsigned int	ro:1;
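
The gadget lines above parse the user-supplied "ro" value with strtobool() and track both the live flag (curlun->ro) and the configured default (initially_ro). A minimal sketch of that kind of boolean store handler, assuming a hypothetical lun structure:

	#include <linux/string.h>
	#include <linux/types.h>

	struct foo_lun {
		bool ro;		/* current state */
		bool initially_ro;	/* state restored when a new backing file is opened */
	};

	static ssize_t foo_store_ro(struct foo_lun *lun, const char *buf, size_t count)
	{
		bool ro;
		int rc = strtobool(buf, &ro);

		if (rc)
			return rc;

		lun->ro = ro;
		lun->initially_ro = ro;
		return count;
	}
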
ro                360 drivers/xen/grant-table.c 	bool ro;
ro                384 drivers/xen/grant-table.c 		if (_gnttab_end_foreign_access_ref(entry->ref, entry->ro)) {
ro                423 drivers/xen/grant-table.c 		entry->ro = readonly;
ro                288 fs/btrfs/block-group.c 	if (bg->ro)
ro                338 fs/btrfs/block-group.c 	ASSERT(bg->ro);
ro                880 fs/btrfs/block-group.c 	BUG_ON(!block_group->ro);
ro               1209 fs/btrfs/block-group.c 	if (cache->ro) {
ro               1210 fs/btrfs/block-group.c 		cache->ro++;
ro               1228 fs/btrfs/block-group.c 		cache->ro++;
ro               1286 fs/btrfs/block-group.c 		    block_group->ro ||
ro               2105 fs/btrfs/block-group.c 	BUG_ON(!cache->ro);
ro               2109 fs/btrfs/block-group.c 	if (!--cache->ro) {
ro               2186 fs/btrfs/block-group.c 		if (block_group->ro)
ro               2762 fs/btrfs/block-group.c 	if (cache->ro) {
ro               2797 fs/btrfs/block-group.c 	if (cache->ro)
ro                 80 fs/btrfs/block-group.h 	unsigned int ro;
ro               2523 fs/btrfs/extent-tree.c 	if (!block_group || block_group->ro)
ro               2846 fs/btrfs/extent-tree.c 		if (cache->ro) {
ro               3492 fs/btrfs/extent-tree.c 	if (cluster_bg != bg && (cluster_bg->ro ||
ro               3877 fs/btrfs/extent-tree.c 			    block_group->ro) {
ro               3905 fs/btrfs/extent-tree.c 		if (unlikely(block_group->ro))
ro               5514 fs/btrfs/extent-tree.c 		if (!block_group->ro) {
ro               2499 fs/btrfs/free-space-cache.c 		if (info->bytes >= bytes && !block_group->ro)
ro               3164 fs/btrfs/free-space-cache.c 	if (!block_group->ro) {
ro               3184 fs/btrfs/free-space-cache.c 		if (block_group->ro)
ro               3680 fs/btrfs/scrub.c 		if (!cache->removed && !cache->ro && cache->reserved == 0 &&
ro                306 fs/btrfs/space-info.c 			cache->reserved, cache->ro ? "[readonly]" : "");
ro                 42 fs/dlm/member.c 	struct rcom_slot *ro;
ro                 45 fs/dlm/member.c 	ro = (struct rcom_slot *)(rc->rc_buf + sizeof(struct rcom_config));
ro                 53 fs/dlm/member.c 		ro->ro_nodeid = cpu_to_le32(slot->nodeid);
ro                 54 fs/dlm/member.c 		ro->ro_slot = cpu_to_le16(slot->slot);
ro                 55 fs/dlm/member.c 		ro++;
ro                101 fs/dlm/member.c 	struct rcom_slot *ro0, *ro;
ro                122 fs/dlm/member.c 	for (i = 0, ro = ro0; i < num_slots; i++, ro++) {
ro                123 fs/dlm/member.c 		ro->ro_nodeid = le32_to_cpu(ro->ro_nodeid);
ro                124 fs/dlm/member.c 		ro->ro_slot = le16_to_cpu(ro->ro_slot);
ro                130 fs/dlm/member.c 		for (i = 0, ro = ro0; i < num_slots; i++, ro++) {
ro                131 fs/dlm/member.c 			if (ro->ro_nodeid != memb->nodeid)
ro                133 fs/dlm/member.c 			memb->slot = ro->ro_slot;
ro               1026 fs/gfs2/ops_fstype.c 	char ro[20];
ro               1028 fs/gfs2/ops_fstype.c 	char *envp[] = { ro, spectator, NULL };
ro               1029 fs/gfs2/ops_fstype.c 	sprintf(ro, "RDONLY=%d", sb_rdonly(sb));
ro                301 fs/gfs2/recovery.c 	int ro = 0;
ro                366 fs/gfs2/recovery.c 			ro = 1;
ro                369 fs/gfs2/recovery.c 				ro = 1;
ro                373 fs/gfs2/recovery.c 				ro = bdev_read_only(sdp->sd_vfs->s_bdev);
ro                374 fs/gfs2/recovery.c 				if (!ro) {
ro                383 fs/gfs2/recovery.c 		if (ro) {
ro                648 fs/gfs2/sys.c  	char ro[20];
ro                650 fs/gfs2/sys.c  	char *envp[] = { ro, spectator, NULL };
ro                652 fs/gfs2/sys.c  	sprintf(ro, "RDONLY=%d", sb_rdonly(sb));
ro               1793 fs/jbd2/journal.c 				 unsigned long ro, unsigned long incompat)
ro               1797 fs/jbd2/journal.c 	if (!compat && !ro && !incompat)
ro               1809 fs/jbd2/journal.c 	    ((be32_to_cpu(sb->s_feature_ro_compat) & ro) == ro) &&
ro               1828 fs/jbd2/journal.c 				      unsigned long ro, unsigned long incompat)
ro               1830 fs/jbd2/journal.c 	if (!compat && !ro && !incompat)
ro               1841 fs/jbd2/journal.c 	    (ro       & JBD2_KNOWN_ROCOMPAT_FEATURES) == ro &&
ro               1861 fs/jbd2/journal.c 			  unsigned long ro, unsigned long incompat)
ro               1869 fs/jbd2/journal.c 	if (jbd2_journal_check_used_features(journal, compat, ro, incompat))
ro               1872 fs/jbd2/journal.c 	if (!jbd2_journal_check_available_features(journal, compat, ro, incompat))
ro               1887 fs/jbd2/journal.c 		  compat, ro, incompat);
ro               1921 fs/jbd2/journal.c 	sb->s_feature_ro_compat |= cpu_to_be32(ro);
ro               1942 fs/jbd2/journal.c 				unsigned long ro, unsigned long incompat)
ro               1947 fs/jbd2/journal.c 		  compat, ro, incompat);
ro               1952 fs/jbd2/journal.c 	sb->s_feature_ro_compat &= ~cpu_to_be32(ro);
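
jbd2_journal_check_available_features() above accepts a feature request only when every asked-for bit is within the corresponding *_KNOWN_* mask, e.g. (ro & JBD2_KNOWN_ROCOMPAT_FEATURES) == ro at line 1841. A hedged sketch of that subset test in isolation:

	#include <linux/types.h>

	/* True when every requested feature bit is also present in the supported mask;
	 * features_subset(ro, JBD2_KNOWN_ROCOMPAT_FEATURES) would mirror line 1841. */
	static inline bool features_subset(unsigned long requested, unsigned long supported)
	{
		return (requested & supported) == requested;
	}
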
ro                314 fs/orangefs/file.c 	struct orangefs_read_options *ro;
ro                324 fs/orangefs/file.c 		iocb->ki_filp->private_data = kmalloc(sizeof *ro, GFP_KERNEL);
ro                327 fs/orangefs/file.c 		ro = iocb->ki_filp->private_data;
ro                328 fs/orangefs/file.c 		ro->blksiz = iter->count;
ro                262 fs/orangefs/inode.c 	struct orangefs_read_options *ro = file->private_data;
ro                281 fs/orangefs/inode.c 	if (ro) {
ro                282 fs/orangefs/inode.c 		if (ro->blksiz < PAGE_SIZE) {
ro                288 fs/orangefs/inode.c 			roundedup = ((PAGE_SIZE - 1) & ro->blksiz) ?
ro                289 fs/orangefs/inode.c 				((ro->blksiz + PAGE_SIZE) & ~(PAGE_SIZE -1)) :
ro                290 fs/orangefs/inode.c 				ro->blksiz;
ro                788 ipc/mqueue.c   static int prepare_open(struct dentry *dentry, int oflag, int ro,
ro                799 ipc/mqueue.c   		if (ro)
ro                800 ipc/mqueue.c   			return ro;
ro                823 ipc/mqueue.c   	int ro;
ro                834 ipc/mqueue.c   	ro = mnt_want_write(mnt);	/* we'll drop it in any case */
ro                842 ipc/mqueue.c   	error = prepare_open(path.dentry, oflag, ro, mode, name, attr);
ro                857 ipc/mqueue.c   	if (!ro)
ro               6937 kernel/events/core.c 	struct remote_output *ro = data;
ro               6938 kernel/events/core.c 	struct ring_buffer *rb = ro->rb;
ro               6960 kernel/events/core.c 		ro->err = __perf_event_stop(&sd);
ro               6968 kernel/events/core.c 	struct remote_output ro = {
ro               6973 kernel/events/core.c 	perf_iterate_ctx(&cpuctx->ctx, __perf_event_output_stop, &ro, false);
ro               6976 kernel/events/core.c 				   &ro, false);
ro               6979 kernel/events/core.c 	return ro.err;
ro                578 kernel/futex.c 	int err, ro = 0;
ro                620 kernel/futex.c 		ro = 1;
ro                698 kernel/futex.c 		if (unlikely(should_fail_futex(fshared)) || ro) {
ro                119 net/can/raw.c  	struct raw_sock *ro = raw_sk(sk);
ro                125 net/can/raw.c  	if (!ro->recv_own_msgs && oskb->sk == sk)
ro                129 net/can/raw.c  	if (!ro->fd_frames && oskb->len != CAN_MTU)
ro                133 net/can/raw.c  	if (this_cpu_ptr(ro->uniq)->skb == oskb &&
ro                134 net/can/raw.c  	    this_cpu_ptr(ro->uniq)->skbcnt == can_skb_prv(oskb)->skbcnt) {
ro                135 net/can/raw.c  		if (ro->join_filters) {
ro                136 net/can/raw.c  			this_cpu_inc(ro->uniq->join_rx_count);
ro                138 net/can/raw.c  			if (this_cpu_ptr(ro->uniq)->join_rx_count < ro->count)
ro                144 net/can/raw.c  		this_cpu_ptr(ro->uniq)->skb = oskb;
ro                145 net/can/raw.c  		this_cpu_ptr(ro->uniq)->skbcnt = can_skb_prv(oskb)->skbcnt;
ro                146 net/can/raw.c  		this_cpu_ptr(ro->uniq)->join_rx_count = 1;
ro                148 net/can/raw.c  		if (ro->join_filters && ro->count > 1)
ro                243 net/can/raw.c  	struct raw_sock *ro = raw_sk(sk);
ro                245 net/can/raw.c  	raw_disable_filters(net, dev, sk, ro->filter, ro->count);
ro                246 net/can/raw.c  	raw_disable_errfilter(net, dev, sk, ro->err_mask);
ro                252 net/can/raw.c  	struct raw_sock *ro = raw_sk(sk);
ro                255 net/can/raw.c  	err = raw_enable_filters(net, dev, sk, ro->filter, ro->count);
ro                257 net/can/raw.c  		err = raw_enable_errfilter(net, dev, sk, ro->err_mask);
ro                259 net/can/raw.c  			raw_disable_filters(net, dev, sk, ro->filter,
ro                260 net/can/raw.c  					    ro->count);
ro                270 net/can/raw.c  	struct raw_sock *ro = container_of(nb, struct raw_sock, notifier);
ro                271 net/can/raw.c  	struct sock *sk = &ro->sk;
ro                279 net/can/raw.c  	if (ro->ifindex != dev->ifindex)
ro                286 net/can/raw.c  		if (ro->bound)
ro                289 net/can/raw.c  		if (ro->count > 1)
ro                290 net/can/raw.c  			kfree(ro->filter);
ro                292 net/can/raw.c  		ro->ifindex = 0;
ro                293 net/can/raw.c  		ro->bound   = 0;
ro                294 net/can/raw.c  		ro->count   = 0;
ro                314 net/can/raw.c  	struct raw_sock *ro = raw_sk(sk);
ro                316 net/can/raw.c  	ro->bound            = 0;
ro                317 net/can/raw.c  	ro->ifindex          = 0;
ro                320 net/can/raw.c  	ro->dfilter.can_id   = 0;
ro                321 net/can/raw.c  	ro->dfilter.can_mask = MASK_ALL;
ro                322 net/can/raw.c  	ro->filter           = &ro->dfilter;
ro                323 net/can/raw.c  	ro->count            = 1;
ro                326 net/can/raw.c  	ro->loopback         = 1;
ro                327 net/can/raw.c  	ro->recv_own_msgs    = 0;
ro                328 net/can/raw.c  	ro->fd_frames        = 0;
ro                329 net/can/raw.c  	ro->join_filters     = 0;
ro                332 net/can/raw.c  	ro->uniq = alloc_percpu(struct uniqframe);
ro                333 net/can/raw.c  	if (unlikely(!ro->uniq))
ro                337 net/can/raw.c  	ro->notifier.notifier_call = raw_notifier;
ro                339 net/can/raw.c  	register_netdevice_notifier(&ro->notifier);
ro                347 net/can/raw.c  	struct raw_sock *ro;
ro                352 net/can/raw.c  	ro = raw_sk(sk);
ro                354 net/can/raw.c  	unregister_netdevice_notifier(&ro->notifier);
ro                359 net/can/raw.c  	if (ro->bound) {
ro                360 net/can/raw.c  		if (ro->ifindex) {
ro                363 net/can/raw.c  			dev = dev_get_by_index(sock_net(sk), ro->ifindex);
ro                373 net/can/raw.c  	if (ro->count > 1)
ro                374 net/can/raw.c  		kfree(ro->filter);
ro                376 net/can/raw.c  	ro->ifindex = 0;
ro                377 net/can/raw.c  	ro->bound   = 0;
ro                378 net/can/raw.c  	ro->count   = 0;
ro                379 net/can/raw.c  	free_percpu(ro->uniq);
ro                394 net/can/raw.c  	struct raw_sock *ro = raw_sk(sk);
ro                406 net/can/raw.c  	if (ro->bound && addr->can_ifindex == ro->ifindex)
ro                438 net/can/raw.c  		if (ro->bound) {
ro                440 net/can/raw.c  			if (ro->ifindex) {
ro                444 net/can/raw.c  						       ro->ifindex);
ro                454 net/can/raw.c  		ro->ifindex = ifindex;
ro                455 net/can/raw.c  		ro->bound = 1;
ro                475 net/can/raw.c  	struct raw_sock *ro = raw_sk(sk);
ro                482 net/can/raw.c  	addr->can_ifindex = ro->ifindex;
ro                491 net/can/raw.c  	struct raw_sock *ro = raw_sk(sk);
ro                524 net/can/raw.c  		if (ro->bound && ro->ifindex)
ro                525 net/can/raw.c  			dev = dev_get_by_index(sock_net(sk), ro->ifindex);
ro                527 net/can/raw.c  		if (ro->bound) {
ro                542 net/can/raw.c  			raw_disable_filters(sock_net(sk), dev, sk, ro->filter,
ro                543 net/can/raw.c  					    ro->count);
ro                547 net/can/raw.c  		if (ro->count > 1)
ro                548 net/can/raw.c  			kfree(ro->filter);
ro                553 net/can/raw.c  			ro->dfilter = sfilter;
ro                554 net/can/raw.c  			filter = &ro->dfilter;
ro                556 net/can/raw.c  		ro->filter = filter;
ro                557 net/can/raw.c  		ro->count  = count;
ro                578 net/can/raw.c  		if (ro->bound && ro->ifindex)
ro                579 net/can/raw.c  			dev = dev_get_by_index(sock_net(sk), ro->ifindex);
ro                582 net/can/raw.c  		if (ro->bound) {
ro                592 net/can/raw.c  					      ro->err_mask);
ro                596 net/can/raw.c  		ro->err_mask = err_mask;
ro                607 net/can/raw.c  		if (optlen != sizeof(ro->loopback))
ro                610 net/can/raw.c  		if (copy_from_user(&ro->loopback, optval, optlen))
ro                616 net/can/raw.c  		if (optlen != sizeof(ro->recv_own_msgs))
ro                619 net/can/raw.c  		if (copy_from_user(&ro->recv_own_msgs, optval, optlen))
ro                625 net/can/raw.c  		if (optlen != sizeof(ro->fd_frames))
ro                628 net/can/raw.c  		if (copy_from_user(&ro->fd_frames, optval, optlen))
ro                634 net/can/raw.c  		if (optlen != sizeof(ro->join_filters))
ro                637 net/can/raw.c  		if (copy_from_user(&ro->join_filters, optval, optlen))
ro                652 net/can/raw.c  	struct raw_sock *ro = raw_sk(sk);
ro                667 net/can/raw.c  		if (ro->count > 0) {
ro                668 net/can/raw.c  			int fsize = ro->count * sizeof(struct can_filter);
ro                672 net/can/raw.c  			if (copy_to_user(optval, ro->filter, len))
ro                686 net/can/raw.c  		val = &ro->err_mask;
ro                692 net/can/raw.c  		val = &ro->loopback;
ro                698 net/can/raw.c  		val = &ro->recv_own_msgs;
ro                704 net/can/raw.c  		val = &ro->fd_frames;
ro                710 net/can/raw.c  		val = &ro->join_filters;
ro                727 net/can/raw.c  	struct raw_sock *ro = raw_sk(sk);
ro                744 net/can/raw.c  		ifindex = ro->ifindex;
ro                752 net/can/raw.c  	if (ro->fd_frames && dev->mtu == CANFD_MTU) {
ro                779 net/can/raw.c  	err = can_send(skb, ro->loopback);
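
The net/can/raw.c lines above implement the receive path and setsockopt handling for CAN_RAW sockets (filters, error mask, loopback, own-message reception, FD frames, joined filters). A small user-space example that exercises the filter and loopback options those handlers parse; interface name and IDs are illustrative, and error handling is omitted:

	#include <linux/can.h>
	#include <linux/can/raw.h>
	#include <net/if.h>
	#include <string.h>
	#include <sys/ioctl.h>
	#include <sys/socket.h>
	#include <unistd.h>

	int main(void)
	{
		int s = socket(PF_CAN, SOCK_RAW, CAN_RAW);
		struct ifreq ifr;
		struct sockaddr_can addr = { .can_family = AF_CAN };
		struct can_filter flt = { .can_id = 0x123, .can_mask = CAN_SFF_MASK };
		int loopback = 0;

		memset(&ifr, 0, sizeof(ifr));
		strcpy(ifr.ifr_name, "can0");		/* illustrative interface name */
		ioctl(s, SIOCGIFINDEX, &ifr);
		addr.can_ifindex = ifr.ifr_ifindex;

		/* Parsed by raw_setsockopt(): install one filter, disable loopback. */
		setsockopt(s, SOL_CAN_RAW, CAN_RAW_FILTER, &flt, sizeof(flt));
		setsockopt(s, SOL_CAN_RAW, CAN_RAW_LOOPBACK, &loopback, sizeof(loopback));

		bind(s, (struct sockaddr *)&addr, sizeof(addr));	/* raw_bind() above */
		close(s);
		return 0;
	}
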
ro                477 net/ieee802154/socket.c 	struct dgram_sock *ro = dgram_sk(sk);
ro                479 net/ieee802154/socket.c 	ro->want_ack = 1;
ro                480 net/ieee802154/socket.c 	ro->want_lqi = 0;
ro                493 net/ieee802154/socket.c 	struct dgram_sock *ro = dgram_sk(sk);
ro                499 net/ieee802154/socket.c 	ro->bound = 0;
ro                519 net/ieee802154/socket.c 	ro->src_addr = haddr;
ro                521 net/ieee802154/socket.c 	ro->bound = 1;
ro                569 net/ieee802154/socket.c 	struct dgram_sock *ro = dgram_sk(sk);
ro                580 net/ieee802154/socket.c 	if (!ro->bound) {
ro                585 net/ieee802154/socket.c 	ieee802154_addr_from_sa(&ro->dst_addr, &addr->addr);
ro                586 net/ieee802154/socket.c 	ro->connected = 1;
ro                595 net/ieee802154/socket.c 	struct dgram_sock *ro = dgram_sk(sk);
ro                598 net/ieee802154/socket.c 	ro->connected = 0;
ro                610 net/ieee802154/socket.c 	struct dgram_sock *ro = dgram_sk(sk);
ro                620 net/ieee802154/socket.c 	if (!ro->connected && !msg->msg_name)
ro                622 net/ieee802154/socket.c 	else if (ro->connected && msg->msg_name)
ro                625 net/ieee802154/socket.c 	if (!ro->bound)
ro                628 net/ieee802154/socket.c 		dev = ieee802154_get_dev(sock_net(sk), &ro->src_addr);
ro                658 net/ieee802154/socket.c 	cb->ackreq = ro->want_ack;
ro                666 net/ieee802154/socket.c 		dst_addr = ro->dst_addr;
ro                669 net/ieee802154/socket.c 	cb->secen = ro->secen;
ro                670 net/ieee802154/socket.c 	cb->secen_override = ro->secen_override;
ro                671 net/ieee802154/socket.c 	cb->seclevel = ro->seclevel;
ro                672 net/ieee802154/socket.c 	cb->seclevel_override = ro->seclevel_override;
ro                675 net/ieee802154/socket.c 				   ro->bound ? &ro->src_addr : NULL, size);
ro                708 net/ieee802154/socket.c 	struct dgram_sock *ro = dgram_sk(sk);
ro                740 net/ieee802154/socket.c 	if (ro->want_lqi) {
ro                773 net/ieee802154/socket.c 		      struct dgram_sock *ro)
ro                775 net/ieee802154/socket.c 	if (!ro->bound)
ro                778 net/ieee802154/socket.c 	if (ro->src_addr.mode == IEEE802154_ADDR_LONG &&
ro                779 net/ieee802154/socket.c 	    hw_addr == ro->src_addr.extended_addr)
ro                782 net/ieee802154/socket.c 	if (ro->src_addr.mode == IEEE802154_ADDR_SHORT &&
ro                783 net/ieee802154/socket.c 	    pan_id == ro->src_addr.pan_id &&
ro                784 net/ieee802154/socket.c 	    short_addr == ro->src_addr.short_addr)
ro                834 net/ieee802154/socket.c 	struct dgram_sock *ro = dgram_sk(sk);
ro                848 net/ieee802154/socket.c 		val = ro->want_ack;
ro                851 net/ieee802154/socket.c 		val = ro->want_lqi;
ro                854 net/ieee802154/socket.c 		if (!ro->secen_override)
ro                856 net/ieee802154/socket.c 		else if (ro->secen)
ro                862 net/ieee802154/socket.c 		if (!ro->seclevel_override)
ro                865 net/ieee802154/socket.c 			val = ro->seclevel;
ro                881 net/ieee802154/socket.c 	struct dgram_sock *ro = dgram_sk(sk);
ro                896 net/ieee802154/socket.c 		ro->want_ack = !!val;
ro                899 net/ieee802154/socket.c 		ro->want_lqi = !!val;
ro                910 net/ieee802154/socket.c 			ro->secen_override = 0;
ro                913 net/ieee802154/socket.c 			ro->secen_override = 1;
ro                914 net/ieee802154/socket.c 			ro->secen = 1;
ro                917 net/ieee802154/socket.c 			ro->secen_override = 1;
ro                918 net/ieee802154/socket.c 			ro->secen = 0;
ro                936 net/ieee802154/socket.c 			ro->seclevel_override = 0;
ro                938 net/ieee802154/socket.c 			ro->seclevel_override = 1;
ro                939 net/ieee802154/socket.c 			ro->seclevel = val;
ro                455 net/rds/rdma.c void rds_rdma_free_op(struct rm_rdma_op *ro)
ro                459 net/rds/rdma.c 	for (i = 0; i < ro->op_nents; i++) {
ro                460 net/rds/rdma.c 		struct page *page = sg_page(&ro->op_sg[i]);
ro                465 net/rds/rdma.c 		if (!ro->op_write) {
ro                472 net/rds/rdma.c 	kfree(ro->op_notifier);
ro                473 net/rds/rdma.c 	ro->op_notifier = NULL;
ro                474 net/rds/rdma.c 	ro->op_active = 0;
ro                938 net/rds/rds.h  void rds_rdma_free_op(struct rm_rdma_op *ro);
ro                503 net/rds/send.c 	struct rm_rdma_op *ro;
ro                509 net/rds/send.c 	ro = &rm->rdma;
ro                511 net/rds/send.c 	    ro->op_active && ro->op_notify && ro->op_notifier) {
ro                512 net/rds/send.c 		notifier = ro->op_notifier;
ro                521 net/rds/send.c 		ro->op_notifier = NULL;
ro                577 net/rds/send.c 	struct rm_rdma_op *ro;
ro                580 net/rds/send.c 	ro = &rm->rdma;
ro                581 net/rds/send.c 	if (ro->op_active && ro->op_notify && ro->op_notifier) {
ro                582 net/rds/send.c 		ro->op_notifier->n_status = status;
ro                583 net/rds/send.c 		list_add_tail(&ro->op_notifier->n_list, &rs->rs_notify_queue);
ro                584 net/rds/send.c 		ro->op_notifier = NULL;
ro                646 net/rds/send.c 			struct rm_rdma_op *ro = &rm->rdma;
ro                652 net/rds/send.c 			if (ro->op_active && ro->op_notifier &&
ro                653 net/rds/send.c 			       (ro->op_notify || (ro->op_recverr && status))) {
ro                654 net/rds/send.c 				notifier = ro->op_notifier;
ro                 72 security/loadpin/loadpin.c 	bool ro = false;
ro                 81 security/loadpin/loadpin.c 		ro = bdev_read_only(mnt_sb->s_bdev);
ro                 86 security/loadpin/loadpin.c 			ro ? "read-only" : "writable");
ro                 90 security/loadpin/loadpin.c 	if (!ro) {
ro                212 sound/soc/au1x/psc-ac97.c 	unsigned long r, ro, stat;
ro                217 sound/soc/au1x/psc-ac97.c 	r = ro = __raw_readl(AC97_CFG(pscdata));
ro                244 sound/soc/au1x/psc-ac97.c 		if (!(r ^ ro))