scheduled         146 arch/s390/pci/pci_irq.c 	atomic_t scheduled;
scheduled         152 arch/s390/pci/pci_irq.c 	atomic_t *scheduled = data;
scheduled         156 arch/s390/pci/pci_irq.c 	} while (atomic_dec_return(scheduled));
scheduled         178 arch/s390/pci/pci_irq.c 		if (atomic_inc_return(&cpu_data->scheduled) > 1)
scheduled         182 arch/s390/pci/pci_irq.c 		cpu_data->csd.info = &cpu_data->scheduled;
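
The five pci_irq.c hits above implement IPI coalescing: a per-CPU atomic counts pending kicks, a previous value above zero means a smp_call_function_single_async() is already in flight, and the remote handler loops until the count drains to zero. A minimal sketch of that pattern follows; cpu_irq_data, remote_handler() and kick_cpu() are illustrative names, not the zpci driver's own.

/* Illustrative sketch; struct and function names are hypothetical. */
#include <linux/atomic.h>
#include <linux/smp.h>

struct cpu_irq_data {
	call_single_data_t csd;
	atomic_t scheduled;
};

static void remote_handler(void *data)
{
	atomic_t *scheduled = data;

	do {
		/* drain this CPU's pending interrupt work (elided) */
	} while (atomic_dec_return(scheduled));
}

static void kick_cpu(struct cpu_irq_data *cpu_data, int cpu)
{
	/* A previous count > 0 means an IPI is already in flight and
	 * the running handler will also pick up this request. */
	if (atomic_inc_return(&cpu_data->scheduled) > 1)
		return;

	cpu_data->csd.func = remote_handler;
	cpu_data->csd.info = &cpu_data->scheduled;
	smp_call_function_single_async(cpu, &cpu_data->csd);
}
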
scheduled        3515 drivers/acpi/nfit/core.c 	int scheduled = 0, busy = 0;
scheduled        3535 drivers/acpi/nfit/core.c 			scheduled++;
scheduled        3537 drivers/acpi/nfit/core.c 	if (scheduled) {
scheduled        3543 drivers/acpi/nfit/core.c 	if (scheduled)
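
In core.c the counter decides whether any scrub work actually got queued: devices found busy are tallied separately, and the delayed work is only armed once at least one region was scheduled. A hedged sketch of that count-then-queue shape, with scrub_desc and the return convention as stand-ins for the driver's real structures:

/* Illustrative sketch; scrub_desc and sched_scrub() are hypothetical. */
#include <linux/errno.h>
#include <linux/list.h>
#include <linux/workqueue.h>

struct scrub_desc {
	struct list_head list;
	bool busy;
	bool pending;
};

static int sched_scrub(struct list_head *descs, struct delayed_work *dwork)
{
	struct scrub_desc *d;
	int scheduled = 0, busy = 0;

	list_for_each_entry(d, descs, list) {
		if (d->busy) {
			busy++;
			continue;
		}
		d->pending = true;
		scheduled++;
	}
	if (scheduled)
		queue_delayed_work(system_wq, dwork, 0);
	return busy && !scheduled ? -EBUSY : 0;
}
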
scheduled        1056 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			fence = dma_fence_get(&s_fence->scheduled);
scheduled         147 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c 			job->base.s_fence->scheduled.context : 0;
scheduled         280 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c 				if (dma_fence_is_signaled(&s_fence->scheduled))
scheduled         283 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c 				return &s_fence->scheduled;
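
All three amdgpu hits consume the scheduler's "scheduled" fence rather than the "finished" one: once a dependent job is known to be on the hardware queue, waiting for it to finish would over-serialize back-to-back submissions. A sketch of that preference, assuming only that to_drm_sched_fence() identifies scheduler fences (the policy function itself is hypothetical):

/* Illustrative sketch; pick_dep_fence() is a hypothetical policy helper. */
#include <drm/gpu_scheduler.h>
#include <linux/dma-fence.h>

static struct dma_fence *pick_dep_fence(struct dma_fence *f)
{
	struct drm_sched_fence *s_fence = to_drm_sched_fence(f);

	if (s_fence) {
		/* Already running: no dependency left to wait on. */
		if (dma_fence_is_signaled(&s_fence->scheduled))
			return NULL;
		/* Otherwise wait only for the job to be scheduled. */
		return dma_fence_get(&s_fence->scheduled);
	}
	return dma_fence_get(f);
}
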
scheduled         421 drivers/gpu/drm/scheduler/sched_entity.c 		fence = dma_fence_get(&s_fence->scheduled);
scheduled          53 drivers/gpu/drm/scheduler/sched_fence.c 	int ret = dma_fence_signal(&fence->scheduled);
scheduled          56 drivers/gpu/drm/scheduler/sched_fence.c 		DMA_FENCE_TRACE(&fence->scheduled,
scheduled          59 drivers/gpu/drm/scheduler/sched_fence.c 		DMA_FENCE_TRACE(&fence->scheduled,
scheduled         128 drivers/gpu/drm/scheduler/sched_fence.c 	dma_fence_put(&fence->scheduled);
scheduled         146 drivers/gpu/drm/scheduler/sched_fence.c 		return container_of(f, struct drm_sched_fence, scheduled);
scheduled         170 drivers/gpu/drm/scheduler/sched_fence.c 	dma_fence_init(&fence->scheduled, &drm_sched_fence_ops_scheduled,
scheduled         336 drivers/gpu/drm/scheduler/sched_main.c 				if (bad->s_fence->scheduled.context ==
scheduled         489 drivers/gpu/drm/scheduler/sched_main.c 			guilty_context = s_job->s_fence->scheduled.context;
scheduled         492 drivers/gpu/drm/scheduler/sched_main.c 		if (found_guilty && s_job->s_fence->scheduled.context == guilty_context)
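
The scheduler core both produces and inspects this fence: sched_fence.c signals it when a job is picked to run and recovers the containing fence with container_of(), while sched_main.c compares scheduled.context to attribute a hang to the guilty entity. A condensed sketch of the embedded-fence idiom, with the struct and ops abbreviated from the real drm_sched_fence machinery:

/* Condensed sketch; sched_fence here abbreviates drm_sched_fence. */
#include <linux/dma-fence.h>
#include <linux/kernel.h>
#include <linux/spinlock.h>

struct sched_fence {
	struct dma_fence scheduled;
	spinlock_t lock;
};

static const char *sf_name(struct dma_fence *f)
{
	return "sched";
}

static const struct dma_fence_ops ops_scheduled = {
	.get_driver_name = sf_name,
	.get_timeline_name = sf_name,
};

static struct sched_fence *from_fence(struct dma_fence *f)
{
	if (f->ops == &ops_scheduled)
		return container_of(f, struct sched_fence, scheduled);
	return NULL;
}

static void fence_init(struct sched_fence *fence, u64 context, u64 seqno)
{
	spin_lock_init(&fence->lock);
	dma_fence_init(&fence->scheduled, &ops_scheduled, &fence->lock,
		       context, seqno);
}

static void mark_scheduled(struct sched_fence *fence)
{
	/* Wakes waiters that only need the job to start, not finish. */
	dma_fence_signal(&fence->scheduled);
}
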
scheduled          59 drivers/infiniband/hw/hfi1/rc.c 				      u8 *prev_ack, bool *scheduled)
scheduled          93 drivers/infiniband/hw/hfi1/rc.c 	if (scheduled)
scheduled          94 drivers/infiniband/hw/hfi1/rc.c 		*scheduled = s;
scheduled          53 drivers/infiniband/hw/hfi1/rc.h 				      u8 *prev_ack, bool *scheduled);
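
The rc.c/rc.h pair shows an optional out-parameter: callers that do not care whether the ACK was scheduled pass NULL, and the helper only dereferences the pointer after a NULL check. A trivial hedged sketch of the idiom (names are illustrative, not hfi1's):

/* Illustrative sketch; find_prev_entry() here is a simplified stand-in. */
#include <linux/types.h>

static u8 find_prev_entry(u8 cur, u8 *prev, bool *scheduled)
{
	u8 p = cur ? cur - 1 : 0;
	bool s = false;

	/* ... walk the queue, possibly setting s ... */

	if (prev)
		*prev = p;
	if (scheduled)
		*scheduled = s;
	return p;
}
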
scheduled         412 drivers/rtc/interface.c 	time64_t now, scheduled;
scheduled         419 drivers/rtc/interface.c 	scheduled = rtc_tm_to_time64(&alarm->time);
scheduled         426 drivers/rtc/interface.c 	if (scheduled <= now)
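
interface.c uses "scheduled" for the alarm expiry in seconds: both the alarm and the current RTC reading are converted with rtc_tm_to_time64(), and an expiry that is not in the future is rejected with -ETIME. A simplified rendering of that check (the surrounding rtc_set_alarm() plumbing is elided):

/* Simplified sketch; check_alarm() condenses the rtc_set_alarm() checks. */
#include <linux/errno.h>
#include <linux/rtc.h>

static int check_alarm(struct rtc_device *rtc, struct rtc_wkalrm *alarm)
{
	struct rtc_time tm;
	time64_t now, scheduled;
	int err;

	err = rtc_valid_tm(&alarm->time);
	if (err)
		return err;
	scheduled = rtc_tm_to_time64(&alarm->time);

	err = rtc_read_time(rtc, &tm);
	if (err)
		return err;
	now = rtc_tm_to_time64(&tm);

	if (scheduled <= now)
		return -ETIME;	/* alarm is in the past */
	return 0;
}
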
scheduled         422 drivers/scsi/libsas/sas_scsi_host.c 	int scheduled = 0, tries = 100;
scheduled         432 drivers/scsi/libsas/sas_scsi_host.c 	while (!scheduled && tries--) {
scheduled         436 drivers/scsi/libsas/sas_scsi_host.c 			scheduled = 1;
scheduled         449 drivers/scsi/libsas/sas_scsi_host.c 		if (scheduled)
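
The libsas hits form a bounded retry loop: up to 100 attempts to claim the error-handling slot under the lock, giving a concurrent recovery time to release it. A sketch under the assumption that a simple bool models the pending state (the real code manipulates libsas device flags):

/* Illustrative sketch; eh_pending is a stand-in for libsas device flags. */
#include <linux/errno.h>
#include <linux/spinlock.h>
#include <linux/workqueue.h>

static int queue_recovery(spinlock_t *lock, struct work_struct *work,
			  bool *eh_pending)
{
	int scheduled = 0, tries = 100;

	while (!scheduled && tries--) {
		spin_lock_irq(lock);
		/* Retry while a concurrent recovery still owns the slot. */
		if (!*eh_pending) {
			*eh_pending = true;
			scheduled = 1;
			schedule_work(work);
		}
		spin_unlock_irq(lock);
	}
	return scheduled ? 0 : -EAGAIN;
}
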
scheduled          61 fs/btrfs/reada.c 	int			scheduled;
scheduled         110 fs/btrfs/reada.c 	re->scheduled = 0;
scheduled         716 fs/btrfs/reada.c 	if (re->scheduled || list_empty(&re->extctl)) {
scheduled         721 fs/btrfs/reada.c 	re->scheduled = 1;
scheduled         875 fs/btrfs/reada.c 				list_empty(&re->extctl), re->scheduled);
scheduled         902 fs/btrfs/reada.c 		if (!re->scheduled) {
scheduled         908 fs/btrfs/reada.c 			list_empty(&re->extctl), re->scheduled);
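
In reada.c the flag claims an extent before it is handed to a worker: the test and the set happen in one locked section, so each extent is dispatched at most once, and the two debug dumps print the flag alongside the pending-request list. A simplified sketch of the claim step (structures are stand-ins for the btrfs readahead types):

/* Simplified sketch; reada_extent is abbreviated from the btrfs type. */
#include <linux/list.h>
#include <linux/spinlock.h>

struct reada_extent {
	struct list_head extctl;	/* pending readahead requests */
	int scheduled;			/* already handed to a worker? */
};

static bool reada_try_claim(spinlock_t *lock, struct reada_extent *re)
{
	spin_lock(lock);
	if (re->scheduled || list_empty(&re->extctl)) {
		spin_unlock(lock);
		return false;	/* in flight already, or nothing to do */
	}
	re->scheduled = 1;
	spin_unlock(lock);
	return true;
}
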
scheduled         126 include/drm/gpu_scheduler.h 	struct dma_fence		scheduled;
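
This declaration is the first of the two fences every scheduler job carries; an abridged view of the containing struct (remaining members elided) shows why both the amdgpu and sched_main hits above key off it:

/* Abridged view; other drm_sched_fence members are elided. */
struct drm_sched_fence {
	struct dma_fence	scheduled;	/* signaled when the job starts */
	struct dma_fence	finished;	/* signaled when the job completes */
	/* ... parent fence, scheduler back-pointer, owner, lock ... */
};
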
scheduled         333 kernel/locking/lockdep.c 	int			scheduled;
scheduled        4929 kernel/locking/lockdep.c 	if (delayed_free.scheduled)
scheduled        4932 kernel/locking/lockdep.c 	delayed_free.scheduled = true;
scheduled        4974 kernel/locking/lockdep.c 	delayed_free.scheduled = false;
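
lockdep uses the flag as a one-shot guard so that only a single RCU batch of zapped classes is in flight: tested and set under the graph lock, cleared by the callback once it has run. A hedged sketch of that shape (the real code uses lockdep's own graph-lock helpers and free lists):

/* Illustrative sketch; the lock and free-list details are simplified. */
#include <linux/rcupdate.h>
#include <linux/spinlock.h>

static DEFINE_SPINLOCK(graph_lock);

static struct {
	struct rcu_head rcu_head;
	int scheduled;
} delayed_free;

static void free_zapped_classes(struct rcu_head *ch)
{
	unsigned long flags;

	spin_lock_irqsave(&graph_lock, flags);
	delayed_free.scheduled = false;
	/* ... actually free the zapped entries (elided) ... */
	spin_unlock_irqrestore(&graph_lock, flags);
}

/* Caller holds graph_lock. */
static void schedule_free_zapped(void)
{
	if (delayed_free.scheduled)
		return;
	delayed_free.scheduled = true;
	call_rcu(&delayed_free.rcu_head, free_zapped_classes);
}
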
scheduled        1826 kernel/workqueue.c 		INIT_LIST_HEAD(&worker->scheduled);
scheduled        1977 kernel/workqueue.c 	    WARN_ON(!list_empty(&worker->scheduled)) ||
scheduled        2198 kernel/workqueue.c 		move_linked_works(work, &collision->scheduled, NULL);
scheduled        2331 kernel/workqueue.c 	while (!list_empty(&worker->scheduled)) {
scheduled        2332 kernel/workqueue.c 		struct work_struct *work = list_first_entry(&worker->scheduled,
scheduled        2398 kernel/workqueue.c 	WARN_ON_ONCE(!list_empty(&worker->scheduled));
scheduled        2419 kernel/workqueue.c 			if (unlikely(!list_empty(&worker->scheduled)))
scheduled        2422 kernel/workqueue.c 			move_linked_works(work, &worker->scheduled, NULL);
scheduled        2468 kernel/workqueue.c 	struct list_head *scheduled = &rescuer->scheduled;
scheduled        2514 kernel/workqueue.c 		WARN_ON_ONCE(!list_empty(scheduled));
scheduled        2519 kernel/workqueue.c 				move_linked_works(work, scheduled, &n);
scheduled        2524 kernel/workqueue.c 		if (!list_empty(scheduled)) {
scheduled        2678 kernel/workqueue.c 		head = worker->scheduled.next;
scheduled        4687 kernel/workqueue.c 			list_for_each_entry(work, &worker->scheduled, entry)
scheduled          34 kernel/workqueue_internal.h 	struct list_head	scheduled;	/* L: scheduled works */
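
The workqueue hits all revolve around worker->scheduled, the per-worker list of works that must run next and in order: move_linked_works() splices a work (plus anything linked to it) onto the list, the worker then drains it, and the rescuer keeps its own such list. A condensed sketch of the drain loop, with the worker struct abbreviated and the actual execution step assumed:

/* Condensed sketch; process_one() stands in for process_one_work(). */
#include <linux/list.h>
#include <linux/workqueue.h>

struct worker {
	struct list_head scheduled;	/* works this worker must run */
};

static void process_one(struct work_struct *work);	/* assumed */

static void process_scheduled(struct worker *worker)
{
	while (!list_empty(&worker->scheduled)) {
		struct work_struct *work =
			list_first_entry(&worker->scheduled,
					 struct work_struct, entry);

		list_del_init(&work->entry);
		process_one(work);
	}
}
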
scheduled          92 net/sctp/stream_sched_prio.c 	bool scheduled = false;
scheduled          98 net/sctp/stream_sched_prio.c 		scheduled = true;
scheduled         114 net/sctp/stream_sched_prio.c 	return scheduled;
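
The SCTP priority scheduler uses the flag as a scan result: walk the priority structures, note whether the stream was found (i.e. already scheduled), and report that to the caller. A minimal sketch of the scan-and-report shape, with a bare list node standing in for the SCTP stream structures:

/* Minimal sketch; prio_entry is a stand-in for the SCTP stream types. */
#include <linux/list.h>

struct prio_entry {
	struct list_head node;	/* linked while scheduled */
};

static bool prio_unsched(struct prio_entry *e)
{
	bool scheduled = false;

	if (!list_empty(&e->node)) {
		scheduled = true;
		list_del_init(&e->node);
	}
	return scheduled;
}
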