drm/msm/sde: clean up idle pc notification implementation

Video mode panels and command mode displays use different
logic for idle timeout management and for notifying the
power collapse signal to the client. This adds complexity
when supporting the same feature in the sde driver. This
patch unifies the idle power collapse management for video
mode and command mode displays. It reuses the existing
infrastructure for this feature and removes all duplicate logic.

Change-Id: If18c0bba70e191a2a98b82bb72f27a714589b0c6
Signed-off-by: Dhaval Patel <pdhaval@codeaurora.org>
diff --git a/drivers/gpu/drm/msm/msm_drv.h b/drivers/gpu/drm/msm/msm_drv.h
index a59da57..12d2d15 100644
--- a/drivers/gpu/drm/msm/msm_drv.h
+++ b/drivers/gpu/drm/msm/msm_drv.h
@@ -147,7 +147,7 @@
 	CRTC_PROP_ROT_CLK,
 	CRTC_PROP_ROI_V1,
 	CRTC_PROP_SECURITY_LEVEL,
-	CRTC_PROP_IDLE_TIME,
+	CRTC_PROP_IDLE_TIMEOUT,
 
 	/* total # of properties */
 	CRTC_PROP_COUNT
diff --git a/drivers/gpu/drm/msm/sde/sde_crtc.c b/drivers/gpu/drm/msm/sde/sde_crtc.c
index 8397e1c..cac7893 100644
--- a/drivers/gpu/drm/msm/sde/sde_crtc.c
+++ b/drivers/gpu/drm/msm/sde/sde_crtc.c
@@ -67,13 +67,9 @@
 static int sde_crtc_idle_interrupt_handler(struct drm_crtc *crtc_drm,
 	bool en, struct sde_irq_callback *idle_irq);
 
-static int sde_crtc_pm_event_handler(struct drm_crtc *crtc_drm,
-	bool en, struct sde_irq_callback *noirq);
-
 static struct sde_crtc_custom_events custom_events[] = {
 	{DRM_EVENT_AD_BACKLIGHT, sde_cp_ad_interrupt},
 	{DRM_EVENT_CRTC_POWER, sde_crtc_power_interrupt_handler},
-	{DRM_EVENT_SDE_POWER, sde_crtc_pm_event_handler},
 	{DRM_EVENT_IDLE_NOTIFY, sde_crtc_idle_interrupt_handler}
 };
 
@@ -2026,6 +2022,47 @@
 	SDE_ATRACE_END("signal_retire_fence");
 }
 
+/* _sde_crtc_idle_notify - signal idle timeout to client */
+static void _sde_crtc_idle_notify(struct sde_crtc *sde_crtc)
+{
+	struct drm_crtc *crtc;
+	struct drm_event event;
+	int ret = 0;
+
+	if (!sde_crtc) {
+		SDE_ERROR("invalid sde crtc\n");
+		return;
+	}
+
+	crtc = &sde_crtc->base;
+	event.type = DRM_EVENT_IDLE_NOTIFY;
+	event.length = sizeof(u32);
+	msm_mode_object_event_notify(&crtc->base, crtc->dev, &event,
+								(u8 *)&ret);
+
+	SDE_DEBUG("crtc:%d idle timeout notified\n", crtc->base.id);
+}
+
+/*
+ * _sde_crtc_handle_event - crtc frame event handler.
+ * This API must manage only non-IRQ context events.
+ */
+static bool _sde_crtc_handle_event(struct sde_crtc *sde_crtc, u32 event)
+{
+	bool event_processed = false;
+
+	/*
+	 * idle events originate from the commit thread and can be
+	 * processed in the same context
+	 */
+	if (event & SDE_ENCODER_FRAME_EVENT_IDLE) {
+		_sde_crtc_idle_notify(sde_crtc);
+		event_processed = true;
+	}
+
+	return event_processed;
+}
+
 static void sde_crtc_frame_event_work(struct kthread_work *work)
 {
 	struct msm_drm_private *priv;
@@ -2119,6 +2156,15 @@
 	SDE_ATRACE_END("crtc_frame_event");
 }
 
+/*
+ * sde_crtc_frame_event_cb - crtc frame event callback API. The CRTC module
+ * registers this callback with the encoder for all frame events such as
+ * release_fence, retire_fence, frame_error, frame_done, idle_timeout,
+ * etc. The encoder may invoke it for different events from different
+ * contexts - IRQ, user thread, commit thread, etc. Each event should be
+ * carefully reviewed and processed in the proper task context to avoid
+ * scheduling delays and to correctly manage IRQ bottom-half processing.
+ */
 static void sde_crtc_frame_event_cb(void *data, u32 event)
 {
 	struct drm_crtc *crtc = (struct drm_crtc *)data;
@@ -2127,6 +2173,7 @@
 	struct sde_crtc_frame_event *fevent;
 	unsigned long flags;
 	u32 crtc_id;
+	bool event_processed = false;
 
 	if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
 		SDE_ERROR("invalid parameters\n");
@@ -2139,6 +2186,11 @@
 	SDE_DEBUG("crtc%d\n", crtc->base.id);
 	SDE_EVT32_VERBOSE(DRMID(crtc), event);
 
+	/* try to process the event in caller context */
+	event_processed = _sde_crtc_handle_event(sde_crtc, event);
+	if (event_processed)
+		return;
+
 	spin_lock_irqsave(&sde_crtc->spin_lock, flags);
 	fevent = list_first_entry_or_null(&sde_crtc->frame_event_list,
 			struct sde_crtc_frame_event, list);
@@ -2179,6 +2231,24 @@
 		sde_crtc_secure_ctrl(crtc, true);
 }
 
+/* _sde_crtc_set_idle_timeout - update idle timeout wait duration */
+static void _sde_crtc_set_idle_timeout(struct drm_crtc *crtc, u64 val)
+{
+	struct drm_encoder *encoder;
+
+	if (!crtc) {
+		SDE_ERROR("invalid crtc\n");
+		return;
+	}
+
+	drm_for_each_encoder(encoder, crtc->dev) {
+		if (encoder->crtc != crtc)
+			continue;
+
+		sde_encoder_set_idle_timeout(encoder, (u32) val);
+	}
+}
+
 /**
  * _sde_crtc_set_input_fence_timeout - update ns version of in fence timeout
  * @cstate: Pointer to sde crtc state
@@ -2494,12 +2564,6 @@
 	if (unlikely(!sde_crtc->num_mixers))
 		return;
 
-	/* cancel the idle notify delayed work */
-	if (sde_encoder_check_mode(sde_crtc->mixers[0].encoder,
-					MSM_DISPLAY_CAP_VID_MODE) &&
-		kthread_cancel_delayed_work_sync(&sde_crtc->idle_notify_work))
-		SDE_DEBUG("idle notify work cancelled\n");
-
 	_sde_crtc_blend_setup(crtc);
 
 	/*
@@ -2533,7 +2597,6 @@
 	struct msm_drm_thread *event_thread;
 	unsigned long flags;
 	struct sde_crtc_state *cstate;
-	int idle_time = 0;
 
 	if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
 		SDE_ERROR("invalid crtc\n");
@@ -2559,7 +2622,6 @@
 	}
 
 	event_thread = &priv->event_thread[crtc->index];
-	idle_time = sde_crtc_get_property(cstate, CRTC_PROP_IDLE_TIME);
 
 	if (sde_crtc->event) {
 		SDE_DEBUG("already received sde_crtc->event\n");
@@ -2590,15 +2652,6 @@
 	/* wait for acquire fences before anything else is done */
 	_sde_crtc_wait_for_fences(crtc);
 
-	/* schedule the idle notify delayed work */
-	if (idle_time && sde_encoder_check_mode(sde_crtc->mixers[0].encoder,
-						MSM_DISPLAY_CAP_VID_MODE)) {
-		kthread_queue_delayed_work(&event_thread->worker,
-					&sde_crtc->idle_notify_work,
-					msecs_to_jiffies(idle_time));
-		SDE_DEBUG("schedule idle notify work in %dms\n", idle_time);
-	}
-
 	if (!cstate->rsc_update) {
 		drm_for_each_encoder(encoder, dev) {
 			if (encoder->crtc != crtc)
@@ -2992,8 +3045,7 @@
 	struct drm_plane *plane;
 	struct drm_encoder *encoder;
 	struct sde_crtc_mixer *m;
-	struct drm_event event;
-	u32 power_on = 0, i, misr_status;
+	u32 i, misr_status;
 
 	if (!crtc) {
 		SDE_ERROR("invalid crtc\n");
@@ -3016,12 +3068,6 @@
 		}
 		sde_cp_crtc_post_ipc(crtc);
 
-		event.type = DRM_EVENT_SDE_POWER;
-		event.length = sizeof(power_on);
-		power_on = 1;
-		msm_mode_object_event_notify(&crtc->base, crtc->dev, &event,
-				(u8 *)&power_on);
-
 		for (i = 0; i < sde_crtc->num_mixers; ++i) {
 			m = &sde_crtc->mixers[i];
 			if (!m->hw_lm || !m->hw_lm->ops.setup_misr ||
@@ -3054,12 +3100,6 @@
 			sde_plane_set_revalidate(plane, true);
 
 		sde_cp_crtc_suspend(crtc);
-
-		event.type = DRM_EVENT_SDE_POWER;
-		event.length = sizeof(power_on);
-		power_on = 0;
-		msm_mode_object_event_notify(&crtc->base, crtc->dev, &event,
-				(u8 *)&power_on);
 		break;
 	default:
 		SDE_DEBUG("event:%d not handled\n", event_type);
@@ -3805,7 +3845,8 @@
 			CRTC_PROP_ROT_CLK);
 
 	msm_property_install_range(&sde_crtc->property_info,
-		"idle_time", 0x0, 0, U64_MAX, 0, CRTC_PROP_IDLE_TIME);
+		"idle_timeout", IDLE_TIMEOUT, 0, U64_MAX, 0,
+		CRTC_PROP_IDLE_TIMEOUT);
 
 	msm_property_install_blob(&sde_crtc->property_info, "capabilities",
 		DRM_MODE_PROP_IMMUTABLE, CRTC_PROP_INFO);
@@ -3938,6 +3979,8 @@
 				cstate->bw_control = true;
 				cstate->bw_split_vote = true;
 				break;
+			case CRTC_PROP_IDLE_TIMEOUT:
+				_sde_crtc_set_idle_timeout(crtc, val);
 			default:
 				/* nothing to do */
 				break;
@@ -4539,31 +4582,6 @@
 	return rc;
 }
 
-/*
- * __sde_crtc_idle_notify_work - signal idle timeout to user space
- */
-static void __sde_crtc_idle_notify_work(struct kthread_work *work)
-{
-	struct sde_crtc *sde_crtc = container_of(work, struct sde_crtc,
-				idle_notify_work.work);
-	struct drm_crtc *crtc;
-	struct drm_event event;
-	int ret = 0;
-
-	if (!sde_crtc) {
-		SDE_ERROR("invalid sde crtc\n");
-	} else {
-		crtc = &sde_crtc->base;
-		event.type = DRM_EVENT_IDLE_NOTIFY;
-		event.length = sizeof(u32);
-		msm_mode_object_event_notify(&crtc->base, crtc->dev,
-				&event, (u8 *)&ret);
-
-		SDE_DEBUG("crtc[%d]: idle timeout notified\n", crtc->base.id);
-	}
-}
-
-
 /* initialize crtc */
 struct drm_crtc *sde_crtc_init(struct drm_device *dev, struct drm_plane *plane)
 {
@@ -4634,9 +4652,6 @@
 	sde_cp_crtc_init(crtc);
 	sde_cp_crtc_install_properties(crtc);
 
-	kthread_init_delayed_work(&sde_crtc->idle_notify_work,
-					__sde_crtc_idle_notify_work);
-
 	SDE_DEBUG("%s: successfully initialized crtc\n", sde_crtc->name);
 	return crtc;
 }
@@ -4773,16 +4788,6 @@
 	return 0;
 }
 
-static int sde_crtc_pm_event_handler(struct drm_crtc *crtc, bool en,
-		struct sde_irq_callback *noirq)
-{
-	/*
-	 * IRQ object noirq is not being used here since there is
-	 * no crtc irq from pm event.
-	 */
-	return 0;
-}
-
 static int sde_crtc_idle_interrupt_handler(struct drm_crtc *crtc_drm,
 	bool en, struct sde_irq_callback *irq)
 {
diff --git a/drivers/gpu/drm/msm/sde/sde_crtc.h b/drivers/gpu/drm/msm/sde/sde_crtc.h
index 1e0e917..ea606b3 100644
--- a/drivers/gpu/drm/msm/sde/sde_crtc.h
+++ b/drivers/gpu/drm/msm/sde/sde_crtc.h
@@ -204,7 +204,6 @@
  * @misr_enable   : boolean entry indicates misr enable/disable status.
  * @misr_frame_count  : misr frame count provided by client
  * @misr_data     : store misr data before turning off the clocks.
- * @idle_notify_work: delayed worker to notify idle timeout to user space
  * @power_event   : registered power event handle
  * @cur_perf      : current performance committed to clock/bandwidth driver
  * @rp_lock       : serialization lock for resource pool
@@ -265,8 +264,6 @@
 	u32 misr_frame_count;
 	u32 misr_data[CRTC_DUAL_MIXERS];
 
-	struct kthread_delayed_work idle_notify_work;
-
 	struct sde_power_event *power_event;
 
 	struct sde_core_perf_params cur_perf;
diff --git a/drivers/gpu/drm/msm/sde/sde_encoder.c b/drivers/gpu/drm/msm/sde/sde_encoder.c
index 4a643cc..0071352 100644
--- a/drivers/gpu/drm/msm/sde/sde_encoder.c
+++ b/drivers/gpu/drm/msm/sde/sde_encoder.c
@@ -71,7 +71,6 @@
 
 #define MISR_BUFF_SIZE			256
 
-#define IDLE_TIMEOUT	(66 - 16/2)
 #define IDLE_SHORT_TIMEOUT	1
 
 /* Maximum number of VSYNC wait attempts for RSC state transition */
@@ -197,6 +196,7 @@
  * @rsc_config:			rsc configuration for display vtotal, fps, etc.
  * @cur_conn_roi:		current connector roi
  * @prv_conn_roi:		previous connector roi to optimize if unchanged
+ * @idle_timeout:		idle timeout duration in milliseconds
  */
 struct sde_encoder_virt {
 	struct drm_encoder base;
@@ -242,6 +242,8 @@
 	struct sde_rsc_cmd_config rsc_config;
 	struct sde_rect cur_conn_roi;
 	struct sde_rect prv_conn_roi;
+
+	u32 idle_timeout;
 };
 
 #define to_sde_encoder_virt(x) container_of(x, struct sde_encoder_virt, base)
@@ -261,6 +263,17 @@
 	return (comp_info->comp_type == MSM_DISPLAY_COMPRESSION_DSC);
 }
 
+void sde_encoder_set_idle_timeout(struct drm_encoder *drm_enc, u32 idle_timeout)
+{
+	struct sde_encoder_virt *sde_enc;
+
+	if (!drm_enc)
+		return;
+
+	sde_enc = to_sde_encoder_virt(drm_enc);
+	sde_enc->idle_timeout = idle_timeout;
+}
+
 bool sde_encoder_is_dsc_merge(struct drm_encoder *drm_enc)
 {
 	enum sde_rm_topology_name topology;
@@ -1472,6 +1485,7 @@
 	struct msm_drm_private *priv;
 	struct msm_drm_thread *disp_thread;
 	int ret;
+	bool is_vid_mode = false;
 
 	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private ||
 			!drm_enc->crtc) {
@@ -1480,6 +1494,8 @@
 	}
 	sde_enc = to_sde_encoder_virt(drm_enc);
 	priv = drm_enc->dev->dev_private;
+	is_vid_mode = sde_enc->disp_info.capabilities &
+						MSM_DISPLAY_CAP_VID_MODE;
 
 	if (drm_enc->crtc->index >= ARRAY_SIZE(priv->disp_thread)) {
 		SDE_ERROR("invalid crtc index\n");
@@ -1489,7 +1505,7 @@
 
 	/*
 	 * when idle_pc is not supported, process only KICKOFF, STOP and MODESET
-	 * events and return early for other events (ie video mode).
+	 * events and return early for other events (ie wb display).
 	 */
 	if (!sde_enc->idle_pc_supported &&
 			(sw_event != SDE_ENC_RC_EVENT_KICKOFF &&
@@ -1518,6 +1534,8 @@
 		if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON) {
 			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in ON state\n",
 					sw_event);
+			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
+				SDE_EVTLOG_FUNC_CASE1);
 			mutex_unlock(&sde_enc->rc_lock);
 			return 0;
 		} else if (sde_enc->rc_state != SDE_ENC_RC_STATE_OFF &&
@@ -1530,9 +1548,13 @@
 			return -EINVAL;
 		}
 
-		/* enable all the clks and resources */
-		_sde_encoder_resource_control_helper(drm_enc, true);
-		_sde_encoder_resource_control_rsc_update(drm_enc, true);
+		if (is_vid_mode && sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
+			_sde_encoder_irq_control(drm_enc, true);
+		} else {
+			/* enable all the clks and resources */
+			_sde_encoder_resource_control_helper(drm_enc, true);
+			_sde_encoder_resource_control_rsc_update(drm_enc, true);
+		}
 
 		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
 				SDE_ENC_RC_STATE_ON, SDE_EVTLOG_FUNC_CASE1);
@@ -1562,6 +1584,8 @@
 		 */
 		if (sde_crtc_frame_pending(drm_enc->crtc) > 1) {
 			SDE_DEBUG_ENC(sde_enc, "skip schedule work");
+			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
+				SDE_EVTLOG_FUNC_CASE2);
 			return 0;
 		}
 
@@ -1582,7 +1606,7 @@
 		if (lp == SDE_MODE_DPMS_LP2)
 			idle_timeout = IDLE_SHORT_TIMEOUT;
 		else
-			idle_timeout = IDLE_TIMEOUT;
+			idle_timeout = sde_enc->idle_timeout;
 
 		if (!autorefresh_enabled)
 			kthread_queue_delayed_work(
@@ -1605,11 +1629,17 @@
 
 		mutex_lock(&sde_enc->rc_lock);
 
+		if (is_vid_mode &&
+			  sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
+			_sde_encoder_irq_control(drm_enc, true);
+		}
 		/* skip if is already OFF or IDLE, resources are off already */
-		if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF ||
+		else if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF ||
 				sde_enc->rc_state == SDE_ENC_RC_STATE_IDLE) {
 			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in %d state\n",
 					sw_event, sde_enc->rc_state);
+			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
+				SDE_EVTLOG_FUNC_CASE3);
 			mutex_unlock(&sde_enc->rc_lock);
 			return 0;
 		}
@@ -1636,6 +1666,8 @@
 		if (sde_enc->rc_state == SDE_ENC_RC_STATE_OFF) {
 			SDE_DEBUG_ENC(sde_enc, "sw_event:%d, rc in OFF state\n",
 					sw_event);
+			SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
+				SDE_EVTLOG_FUNC_CASE4);
 			mutex_unlock(&sde_enc->rc_lock);
 			return 0;
 		} else if (sde_enc->rc_state == SDE_ENC_RC_STATE_ON ||
@@ -1756,9 +1788,15 @@
 			return 0;
 		}
 
-		/* disable all the clks and resources */
-		_sde_encoder_resource_control_rsc_update(drm_enc, false);
-		_sde_encoder_resource_control_helper(drm_enc, false);
+		if (is_vid_mode) {
+			_sde_encoder_irq_control(drm_enc, false);
+		} else {
+			/* disable all the clks and resources */
+			_sde_encoder_resource_control_rsc_update(drm_enc,
+								false);
+			_sde_encoder_resource_control_helper(drm_enc, false);
+		}
+
 		SDE_EVT32(DRMID(drm_enc), sw_event, sde_enc->rc_state,
 				SDE_ENC_RC_STATE_IDLE, SDE_EVTLOG_FUNC_CASE7);
 		sde_enc->rc_state = SDE_ENC_RC_STATE_IDLE;
@@ -1777,20 +1815,6 @@
 	return 0;
 }
 
-static void sde_encoder_off_work(struct kthread_work *work)
-{
-	struct sde_encoder_virt *sde_enc = container_of(work,
-			struct sde_encoder_virt, delayed_off_work.work);
-
-	if (!sde_enc) {
-		SDE_ERROR("invalid sde encoder\n");
-		return;
-	}
-
-	sde_encoder_resource_control(&sde_enc->base,
-			SDE_ENC_RC_EVENT_ENTER_IDLE);
-}
-
 static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
 				      struct drm_display_mode *mode,
 				      struct drm_display_mode *adj_mode)
@@ -2259,6 +2283,23 @@
 	}
 }
 
+static void sde_encoder_off_work(struct kthread_work *work)
+{
+	struct sde_encoder_virt *sde_enc = container_of(work,
+			struct sde_encoder_virt, delayed_off_work.work);
+
+	if (!sde_enc) {
+		SDE_ERROR("invalid sde encoder\n");
+		return;
+	}
+
+	sde_encoder_resource_control(&sde_enc->base,
+						SDE_ENC_RC_EVENT_ENTER_IDLE);
+
+	sde_encoder_frame_done_callback(&sde_enc->base, NULL,
+				SDE_ENCODER_FRAME_EVENT_IDLE);
+}
+
 /**
  * _sde_encoder_trigger_flush - trigger flush for a physical encoder
  * drm_enc: Pointer to drm encoder structure
@@ -3246,7 +3287,8 @@
 
 	SDE_DEBUG("dsi_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);
 
-	if (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
+	if ((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) ||
+	    (disp_info->capabilities & MSM_DISPLAY_CAP_VID_MODE))
 		sde_enc->idle_pc_supported = sde_kms->catalog->has_idle_pc;
 
 	mutex_lock(&sde_enc->enc_lock);
@@ -3411,7 +3453,7 @@
 	mutex_init(&sde_enc->rc_lock);
 	kthread_init_delayed_work(&sde_enc->delayed_off_work,
 			sde_encoder_off_work);
-
+	sde_enc->idle_timeout = IDLE_TIMEOUT;
 	memcpy(&sde_enc->disp_info, disp_info, sizeof(*disp_info));
 
 	SDE_DEBUG_ENC(sde_enc, "created\n");
diff --git a/drivers/gpu/drm/msm/sde/sde_encoder.h b/drivers/gpu/drm/msm/sde/sde_encoder.h
index 618d4ae..bb7f31d 100644
--- a/drivers/gpu/drm/msm/sde/sde_encoder.h
+++ b/drivers/gpu/drm/msm/sde/sde_encoder.h
@@ -29,6 +29,9 @@
 #define SDE_ENCODER_FRAME_EVENT_PANEL_DEAD		BIT(2)
 #define SDE_ENCODER_FRAME_EVENT_SIGNAL_RELEASE_FENCE	BIT(3)
 #define SDE_ENCODER_FRAME_EVENT_SIGNAL_RETIRE_FENCE	BIT(4)
+#define SDE_ENCODER_FRAME_EVENT_IDLE			BIT(5)
+
+#define IDLE_TIMEOUT	(66 - 16/2)
 
 /**
  * Encoder functions and data types
@@ -205,4 +208,13 @@
  */
 void sde_encoder_prepare_commit(struct drm_encoder *drm_enc);
 
+/**
+ * sde_encoder_set_idle_timeout - set the idle timeout for video
+ *                    and command mode encoders.
+ * @drm_enc:    Pointer to previously created drm encoder structure
+ * @idle_timeout:    idle timeout duration in milliseconds
+ */
+void sde_encoder_set_idle_timeout(struct drm_encoder *drm_enc,
+							u32 idle_timeout);
+
 #endif /* __SDE_ENCODER_H__ */
diff --git a/drivers/gpu/drm/msm/sde/sde_encoder_phys_vid.c b/drivers/gpu/drm/msm/sde/sde_encoder_phys_vid.c
index 78c22c9..61e761d 100644
--- a/drivers/gpu/drm/msm/sde/sde_encoder_phys_vid.c
+++ b/drivers/gpu/drm/msm/sde/sde_encoder_phys_vid.c
@@ -532,17 +532,25 @@
 {
 	int ret = 0;
 	struct sde_encoder_phys_vid *vid_enc;
+	int refcount;
 
 	if (!phys_enc) {
 		SDE_ERROR("invalid encoder\n");
 		return -EINVAL;
 	}
 
+	refcount = atomic_read(&phys_enc->vblank_refcount);
 	vid_enc = to_sde_encoder_phys_vid(phys_enc);
 
 	/* Slave encoders don't report vblank */
 	if (!sde_encoder_phys_vid_is_master(phys_enc))
-		return 0;
+		goto end;
+
+	/* protect against negative */
+	if (!enable && refcount == 0) {
+		ret = -EINVAL;
+		goto end;
+	}
 
 	SDE_DEBUG_VIDENC(vid_enc, "[%pS] enable=%d/%d\n",
 			__builtin_return_address(0),
@@ -557,11 +565,15 @@
 		ret = sde_encoder_helper_unregister_irq(phys_enc,
 				INTR_IDX_VSYNC);
 
-	if (ret)
+end:
+	if (ret) {
 		SDE_ERROR_VIDENC(vid_enc,
 				"control vblank irq error %d, enable %d\n",
 				ret, enable);
-
+		SDE_EVT32(DRMID(phys_enc->parent),
+				vid_enc->hw_intf->idx - INTF_0,
+				enable, refcount, SDE_EVTLOG_ERROR);
+	}
 	return ret;
 }
 
@@ -572,7 +584,6 @@
 	struct sde_hw_intf *intf;
 	struct sde_hw_ctl *ctl;
 	u32 flush_mask = 0;
-	int ret;
 
 	if (!phys_enc || !phys_enc->parent || !phys_enc->parent->dev ||
 			!phys_enc->parent->dev->dev_private) {
@@ -601,15 +612,6 @@
 	sde_encoder_helper_split_config(phys_enc, vid_enc->hw_intf->idx);
 
 	sde_encoder_phys_vid_setup_timing_engine(phys_enc);
-	ret = sde_encoder_phys_vid_control_vblank_irq(phys_enc, true);
-	if (ret)
-		goto end;
-
-	ret = sde_encoder_helper_register_irq(phys_enc, INTR_IDX_UNDERRUN);
-	if (ret) {
-		sde_encoder_phys_vid_control_vblank_irq(phys_enc, false);
-		goto end;
-	}
 
 	/*
 	 * For pp-split, skip setting the flush bit for the slave intf, since
@@ -630,7 +632,6 @@
 	if (phys_enc->enable_state == SDE_ENC_DISABLED)
 		phys_enc->enable_state = SDE_ENC_ENABLING;
 
-end:
 	return;
 }
 
@@ -804,11 +805,8 @@
 			SDE_EVT32(DRMID(phys_enc->parent),
 					vid_enc->hw_intf->idx - INTF_0, ret);
 		}
-		sde_encoder_phys_vid_control_vblank_irq(phys_enc, false);
 	}
 
-	sde_encoder_helper_unregister_irq(phys_enc, INTR_IDX_UNDERRUN);
-
 	phys_enc->enable_state = SDE_ENC_DISABLED;
 }
 
@@ -840,6 +838,32 @@
 	}
 }
 
+static void sde_encoder_phys_vid_irq_control(struct sde_encoder_phys *phys_enc,
+		bool enable)
+{
+	struct sde_encoder_phys_vid *vid_enc;
+	int ret;
+
+	if (!phys_enc)
+		return;
+
+	vid_enc = to_sde_encoder_phys_vid(phys_enc);
+
+	SDE_EVT32(DRMID(phys_enc->parent), vid_enc->hw_intf->idx - INTF_0,
+			enable, atomic_read(&phys_enc->vblank_refcount));
+
+	if (enable) {
+		ret = sde_encoder_phys_vid_control_vblank_irq(phys_enc, true);
+		if (ret)
+			return;
+
+		sde_encoder_helper_register_irq(phys_enc, INTR_IDX_UNDERRUN);
+	} else {
+		sde_encoder_phys_vid_control_vblank_irq(phys_enc, false);
+		sde_encoder_helper_unregister_irq(phys_enc, INTR_IDX_UNDERRUN);
+	}
+}
+
 static void sde_encoder_phys_vid_setup_misr(struct sde_encoder_phys *phys_enc,
 						bool enable, u32 frame_count)
 {
@@ -879,6 +903,7 @@
 	ops->wait_for_commit_done = sde_encoder_phys_vid_wait_for_vblank;
 	ops->wait_for_vblank = sde_encoder_phys_vid_wait_for_vblank;
 	ops->wait_for_tx_complete = sde_encoder_phys_vid_wait_for_vblank;
+	ops->irq_control = sde_encoder_phys_vid_irq_control;
 	ops->prepare_for_kickoff = sde_encoder_phys_vid_prepare_for_kickoff;
 	ops->handle_post_kickoff = sde_encoder_phys_vid_handle_post_kickoff;
 	ops->needs_single_flush = sde_encoder_phys_vid_needs_single_flush;