
Merge remote-tracking branch 'origin/display-kernel.lnx.5.4' into display-kernel.lnx.5.10

* origin/display-kernel.lnx.5.4:
  disp: msm: dsi: move backlight operations to post kickoff
  disp: msm: sde: cleanup ctl/intf registers on cmd encoder disable
  disp: msm: dsi: enable DMA cmd scheduling for video mode panels
  disp: msm: dp: validate edid before dereferencing
  disp: msm: sde: update encoder atomic check during qsync usecase
  disp: msm: dp: handle link maintenance failures
  disp: msm: dp: fix page fault from hdcp buffer access
  disp: msm: dsi: Add support for 5nm C-PHY shadow clock
  disp: msm: dsi: trigger broadcast commands using DMA start window
  disp: msm: sde: flush all event thread work during CRTC disable
  disp: msm: sde: ignore HW recovery disable event
  disp: msm: dp: remove debug log output in isr
  drm: msm: Reset register when NoiseThresh disabled
  disp: msm: sde: use prepare_commit connector callback for DSI
  disp: msm: sde: retain ubwc settings during LA to LE transition
  disp: msm: sde: always enable prog fetch and fix prefill calculations
  disp: msm: dp: fix DSC and PPS version mismatch
  disp: msm: dp: add support for continuous PPS command
  disp: msm: sde: fix dsc 1_2 rate control parameter

Change-Id: I8f427b6ba706dd04de1b840952a41f2cc3598c2d
Alisha Thapaliya, 4 years ago · db23860b24

+ 7 - 0
msm/dp/dp_catalog.c

@@ -1534,6 +1534,7 @@ static void dp_catalog_panel_dp_flush(struct dp_catalog_panel *panel,
 	struct dp_catalog_private *catalog;
 	struct dp_io_data *io_data;
 	u32 dp_flush, offset;
+	struct dp_dsc_cfg_data *dsc;
 
 	if (!panel) {
 		DP_ERR("invalid input\n");
@@ -1547,6 +1548,7 @@ static void dp_catalog_panel_dp_flush(struct dp_catalog_panel *panel,
 
 	catalog = dp_catalog_get_priv(panel);
 	io_data = catalog->io.dp_link;
+	dsc = &panel->dsc;
 
 	if (panel->stream_id == DP_STREAM_0)
 		offset = 0;
@@ -1554,6 +1556,11 @@ static void dp_catalog_panel_dp_flush(struct dp_catalog_panel *panel,
 		offset = MMSS_DP1_FLUSH - MMSS_DP_FLUSH;
 
 	dp_flush = dp_read(MMSS_DP_FLUSH + offset);
+
+	if ((flush_bit == DP_PPS_FLUSH) &&
+		dsc->continuous_pps)
+		dp_flush &= ~BIT(2);
+
 	dp_flush |= BIT(flush_bit);
 	dp_write(MMSS_DP_FLUSH + offset, dp_flush);
 }

+ 1 - 0
msm/dp/dp_catalog.h

@@ -149,6 +149,7 @@ struct dp_catalog_audio {
 
 struct dp_dsc_cfg_data {
 	bool dsc_en;
+	bool continuous_pps;
 	char pps[128];
 	u32 pps_len;
 	u32 pps_word[32];

+ 6 - 2
msm/dp/dp_ctrl.c

@@ -10,6 +10,7 @@
 
 #include "dp_ctrl.h"
 #include "dp_debug.h"
+#include "sde_dbg.h"
 
 #define DP_MST_DEBUG(fmt, ...) DP_DEBUG(fmt, ##__VA_ARGS__)
 
@@ -93,13 +94,11 @@ enum notification_status {
 
 static void dp_ctrl_idle_patterns_sent(struct dp_ctrl_private *ctrl)
 {
-	DP_DEBUG("idle_patterns_sent\n");
 	complete(&ctrl->idle_comp);
 }
 
 static void dp_ctrl_video_ready(struct dp_ctrl_private *ctrl)
 {
-	DP_DEBUG("dp_video_ready\n");
 	complete(&ctrl->video_comp);
 }
 
@@ -185,6 +184,8 @@ static void dp_ctrl_wait4video_ready(struct dp_ctrl_private *ctrl)
 {
 	if (!wait_for_completion_timeout(&ctrl->video_comp, HZ / 2))
 		DP_WARN("SEND_VIDEO time out\n");
+	else
+		DP_DEBUG("SEND_VIDEO triggered\n");
 }
 
 static int dp_ctrl_update_sink_vx_px(struct dp_ctrl_private *ctrl)
@@ -1410,12 +1411,14 @@ static void dp_ctrl_isr(struct dp_ctrl *dp_ctrl)
 {
 	struct dp_ctrl_private *ctrl;
 
+	SDE_EVT32_EXTERNAL(SDE_EVTLOG_FUNC_ENTRY);
 	if (!dp_ctrl)
 		return;
 
 	ctrl = container_of(dp_ctrl, struct dp_ctrl_private, dp_ctrl);
 
 	ctrl->catalog->get_interrupt(ctrl->catalog);
+	SDE_EVT32_EXTERNAL(ctrl->catalog->isr);
 
 	if (ctrl->catalog->isr & DP_CTRL_INTR_READY_FOR_VIDEO)
 		dp_ctrl_video_ready(ctrl);
@@ -1428,6 +1431,7 @@ static void dp_ctrl_isr(struct dp_ctrl *dp_ctrl)
 
 	if (ctrl->catalog->isr5 & DP_CTRL_INTR_MST_DP1_VCPF_SENT)
 		dp_ctrl_idle_patterns_sent(ctrl);
+	SDE_EVT32_EXTERNAL(SDE_EVTLOG_FUNC_EXIT);
 }
 
 void dp_ctrl_set_sim_mode(struct dp_ctrl *dp_ctrl, bool en)

+ 2 - 2
msm/dp/dp_debug.c

@@ -157,7 +157,7 @@ static ssize_t dp_debug_write_edid(struct file *file,
 	edid = debug->edid;
 bail:
 	kfree(buf);
-	debug->panel->set_edid(debug->panel, edid);
+	debug->panel->set_edid(debug->panel, edid, debug->edid_size);
 
 	/*
 	 * print edid status as this code is executed
@@ -1617,7 +1617,7 @@ static void dp_debug_set_sim_mode(struct dp_debug_private *debug, bool sim)
 		debug->ctrl->set_sim_mode(debug->ctrl, false);
 		debug->dp_debug.sim_mode = false;
 
-		debug->panel->set_edid(debug->panel, 0);
+		debug->panel->set_edid(debug->panel, 0, 0);
 		if (debug->edid) {
 			devm_kfree(debug->dev, debug->edid);
 			debug->edid = NULL;

+ 9 - 3
msm/dp/dp_display.c

@@ -1604,6 +1604,7 @@ static void dp_display_attention_work(struct work_struct *work)
 {
 	struct dp_display_private *dp = container_of(work,
 			struct dp_display_private, attention_work);
+	int rc = 0;
 
 	SDE_EVT32_EXTERNAL(SDE_EVTLOG_FUNC_ENTRY, dp->state);
 	mutex_lock(&dp->session_lock);
@@ -1673,16 +1674,20 @@ static void dp_display_attention_work(struct work_struct *work)
 		if (dp->link->sink_request & DP_TEST_LINK_TRAINING) {
 			SDE_EVT32_EXTERNAL(dp->state, DP_TEST_LINK_TRAINING);
 			dp->link->send_test_response(dp->link);
-			dp->ctrl->link_maintenance(dp->ctrl);
+			rc = dp->ctrl->link_maintenance(dp->ctrl);
 		}
 
 		if (dp->link->sink_request & DP_LINK_STATUS_UPDATED) {
 			SDE_EVT32_EXTERNAL(dp->state, DP_LINK_STATUS_UPDATED);
-			dp->ctrl->link_maintenance(dp->ctrl);
+			rc = dp->ctrl->link_maintenance(dp->ctrl);
 		}
 
-		dp_audio_enable(dp, true);
+		if (!rc)
+			dp_audio_enable(dp, true);
+
 		mutex_unlock(&dp->session_lock);
+		if (rc)
+			goto exit;
 
 		if (dp->link->sink_request & (DP_TEST_LINK_PHY_TEST_PATTERN |
 			DP_TEST_LINK_TRAINING))
@@ -1883,6 +1888,7 @@ static int dp_init_sub_modules(struct dp_display_private *dp)
 	}
 
 	g_dp_display->is_mst_supported = dp->parser->has_mst;
+	g_dp_display->dsc_cont_pps = dp->parser->dsc_continuous_pps;
 
 	dp->catalog = dp_catalog_get(dev, dp->parser);
 	if (IS_ERR(dp->catalog)) {

+ 1 - 0
msm/dp/dp_display.h

@@ -70,6 +70,7 @@ struct dp_display {
 	void *base_dp_panel;
 	bool is_sst_connected;
 	bool is_mst_supported;
+	bool dsc_cont_pps;
 	u32 max_pclk_khz;
 	void *dp_mst_prv_info;
 	u32 max_mixer_count;

+ 4 - 0
msm/dp/dp_drm.c

@@ -366,6 +366,10 @@ int dp_connector_post_init(struct drm_connector *connector, void *display)
 	dp_display->bridge->dp_panel = sde_conn->drv_panel;
 
 	rc = dp_mst_init(dp_display);
+
+	if (dp_display->dsc_cont_pps)
+		sde_conn->ops.update_pps = NULL;
+
 end:
 	return rc;
 }

+ 17 - 2
msm/dp/dp_mst_drm.c

@@ -1768,6 +1768,21 @@ static void dp_mst_connector_pre_destroy(struct drm_connector *connector,
 	SDE_EVT32_EXTERNAL(SDE_EVTLOG_FUNC_EXIT, conn_id);
 }
 
+static int dp_mst_connector_post_init(struct drm_connector *connector,
+		void *display)
+{
+	struct dp_display *dp_display = display;
+	struct sde_connector *sde_conn = to_sde_connector(connector);
+
+	if (!dp_display || !connector)
+		return -EINVAL;
+
+	if (dp_display->dsc_cont_pps)
+		sde_conn->ops.update_pps = NULL;
+
+	return 0;
+}
+
 /* DRM MST callbacks */
 
 static struct drm_connector *
@@ -1775,7 +1790,7 @@ dp_mst_add_connector(struct drm_dp_mst_topology_mgr *mgr,
 		struct drm_dp_mst_port *port, const char *pathprop)
 {
 	static const struct sde_connector_ops dp_mst_connector_ops = {
-		.post_init  = NULL,
+		.post_init  = dp_mst_connector_post_init,
 		.detect     = dp_mst_connector_detect,
 		.get_modes  = dp_mst_connector_get_modes,
 		.mode_valid = dp_mst_connector_mode_valid,
@@ -2120,7 +2135,7 @@ dp_mst_drm_fixed_connector_init(struct dp_display *dp_display,
 			struct drm_encoder *encoder)
 {
 	static const struct sde_connector_ops dp_mst_connector_ops = {
-		.post_init  = NULL,
+		.post_init  = dp_mst_connector_post_init,
 		.detect     = dp_mst_fixed_connector_detect,
 		.get_modes  = dp_mst_connector_get_modes,
 		.mode_valid = dp_mst_connector_mode_valid,

+ 45 - 5
msm/dp/dp_panel.c

@@ -9,6 +9,7 @@
 #include "dp_debug.h"
 #include <drm/drm_dsc.h>
 #include "sde_dsc_helper.h"
+#include <drm/drm_edid.h>
 
 #define DP_KHZ_TO_HZ 1000
 #define DP_PANEL_DEFAULT_BPP 24
@@ -1357,9 +1358,28 @@ static int dp_panel_dsc_prepare_basic_params(
 	u32 ppr_per_slice;
 	u32 slice_caps_1;
 	u32 slice_caps_2;
+	u32 dsc_version_major, dsc_version_minor;
+	bool dsc_version_supported = false;
 
-	comp_info->dsc_info.config.dsc_version_major = 0x1;
-	comp_info->dsc_info.config.dsc_version_minor = 0x1;
+	dsc_version_major = dp_panel->sink_dsc_caps.version & 0xF;
+	dsc_version_minor = (dp_panel->sink_dsc_caps.version >> 4) & 0xF;
+	dsc_version_supported = (dsc_version_major == 0x1 &&
+			(dsc_version_minor == 0x1 || dsc_version_minor == 0x2))
+			? true : false;
+
+	DP_DEBUG("DSC version: %d.%d, dpcd value: %x\n",
+			dsc_version_major, dsc_version_minor,
+			dp_panel->sink_dsc_caps.version);
+
+	if (!dsc_version_supported) {
+		dsc_version_major = 1;
+		dsc_version_minor = 1;
+		DP_ERR("invalid sink DSC version, fallback to %d.%d\n",
+				dsc_version_major, dsc_version_minor);
+	}
+
+	comp_info->dsc_info.config.dsc_version_major = dsc_version_major;
+	comp_info->dsc_info.config.dsc_version_minor = dsc_version_minor;
 	comp_info->dsc_info.scr_rev = 0x0;
 
 	comp_info->dsc_info.slice_per_pkt = 0;
@@ -1597,7 +1617,25 @@ static int dp_panel_set_default_link_params(struct dp_panel *dp_panel)
 	return 0;
 }
 
-static int dp_panel_set_edid(struct dp_panel *dp_panel, u8 *edid)
+static bool dp_panel_validate_edid(struct edid *edid, size_t edid_size)
+{
+	if (!edid || (edid_size < EDID_LENGTH))
+		return false;
+
+	if (EDID_LENGTH * (edid->extensions + 1) > edid_size) {
+		DP_ERR("edid size does not match allocated.\n");
+		return false;
+	}
+
+	if (!drm_edid_is_valid(edid)) {
+		DP_ERR("invalid edid.\n");
+		return false;
+	}
+	return true;
+}
+
+static int dp_panel_set_edid(struct dp_panel *dp_panel, u8 *edid,
+		size_t edid_size)
 {
 	struct dp_panel_private *panel;
 
@@ -1608,7 +1646,7 @@ static int dp_panel_set_edid(struct dp_panel *dp_panel, u8 *edid)
 
 	panel = container_of(dp_panel, struct dp_panel_private, dp_panel);
 
-	if (edid) {
+	if (edid && dp_panel_validate_edid((struct edid *)edid, edid_size)) {
 		dp_panel->edid_ctrl->edid = (struct edid *)edid;
 		panel->custom_edid = true;
 	} else {
@@ -2202,7 +2240,7 @@ static void dp_panel_config_dsc(struct dp_panel *dp_panel, bool enable)
 		dsc->be_in_lane = _dp_panel_calc_be_in_lane(dp_panel);
 		dsc->dsc_en = true;
 		dsc->dto_en = true;
-
+		dsc->continuous_pps = dp_panel->dsc_continuous_pps;
 		dp_panel_get_dto_params(comp_info->comp_ratio, &dsc->dto_n,
 				&dsc->dto_d, pinfo->bpp);
 	} else {
@@ -2210,6 +2248,7 @@ static void dp_panel_config_dsc(struct dp_panel *dp_panel, bool enable)
 		dsc->dto_en = false;
 		dsc->dto_n = 0;
 		dsc->dto_d = 0;
+		dsc->continuous_pps = false;
 	}
 
 	catalog->stream_id = dp_panel->stream_id;
@@ -3007,6 +3046,7 @@ struct dp_panel *dp_panel_get(struct dp_panel_in *in)
 
 	dp_panel->dsc_feature_enable = panel->parser->dsc_feature_enable;
 	dp_panel->fec_feature_enable = panel->parser->fec_feature_enable;
+	dp_panel->dsc_continuous_pps = panel->parser->dsc_continuous_pps;
 
 	if (in->base_panel) {
 		memcpy(dp_panel->dpcd, in->base_panel->dpcd,
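
The new dp_panel_validate_edid() guards debugfs-injected EDIDs on three counts: the blob must span at least one 128-byte block, the extension count in the base block must not imply more data than the caller allocated, and drm_edid_is_valid() must accept it. Below is a minimal user-space sketch of the size check alone, assuming only the standard EDID layout (128-byte blocks, extension count in byte 126) and leaving checksum validation to the drm helper.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define EDID_BLOCK_LEN   128	/* EDID_LENGTH in the kernel */
#define EDID_EXT_OFFSET  126	/* extension-count byte in the base block */

/* Return true only if the buffer holds the base block plus every
 * extension block the base block claims to have. */
static bool edid_size_ok(const uint8_t *edid, size_t edid_size)
{
	size_t blocks;

	if (!edid || edid_size < EDID_BLOCK_LEN)
		return false;

	blocks = (size_t)edid[EDID_EXT_OFFSET] + 1;
	if (blocks * EDID_BLOCK_LEN > edid_size)
		return false;	/* claims more data than was allocated */

	return true;
}

int main(void)
{
	uint8_t edid[256] = { 0 };

	edid[EDID_EXT_OFFSET] = 1;	/* base block plus one extension */
	printf("256 bytes, 1 ext: %d\n", edid_size_ok(edid, sizeof(edid)));
	printf("128 bytes, 1 ext: %d\n", edid_size_ok(edid, 128));
	return 0;
}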

+ 2 - 1
msm/dp/dp_panel.h

@@ -133,6 +133,7 @@ struct dp_panel {
 	bool dsc_en;
 	bool fec_en;
 	bool widebus_en;
+	bool dsc_continuous_pps;
 	bool mst_state;
 
 	s64 fec_overhead_fp;
@@ -147,7 +148,7 @@ struct dp_panel {
 	int (*get_modes)(struct dp_panel *dp_panel,
 		struct drm_connector *connector, struct dp_display_mode *mode);
 	void (*handle_sink_request)(struct dp_panel *dp_panel);
-	int (*set_edid)(struct dp_panel *dp_panel, u8 *edid);
+	int (*set_edid)(struct dp_panel *dp_panel, u8 *edid, size_t edid_size);
 	int (*set_dpcd)(struct dp_panel *dp_panel, u8 *dpcd);
 	int (*setup_hdr)(struct dp_panel *dp_panel,
 		struct drm_msm_ext_hdr_metadata *hdr_meta,

+ 5 - 0
msm/dp/dp_parser.c

@@ -727,8 +727,13 @@ static void dp_parser_dsc(struct dp_parser *parser)
 	parser->dsc_feature_enable = of_property_read_bool(dev->of_node,
 			"qcom,dsc-feature-enable");
 
+	parser->dsc_continuous_pps = of_property_read_bool(dev->of_node,
+			"qcom,dsc-continuous-pps");
+
 	DP_DEBUG("dsc parsing successful. dsc:%d\n",
 			parser->dsc_feature_enable);
+	DP_DEBUG("cont_pps:%d\n",
+			parser->dsc_continuous_pps);
 }
 
 static void dp_parser_fec(struct dp_parser *parser)

+ 2 - 0
msm/dp/dp_parser.h

@@ -195,6 +195,7 @@ static inline char *dp_phy_aux_config_type_to_string(u32 cfg_type)
  * @gpio_aux_switch: presence GPIO AUX switch status
  * @dsc_feature_enable: DSC feature enable status
  * @fec_feature_enable: FEC feature enable status
+ * @dsc_continuous_pps: PPS sent every frame by HW
  * @has_widebus: widebus (2PPC) feature enable status
  * @mst_fixed_port: mst port_num reserved for fixed topology
  * @parse: function to be called by client to parse device tree.
@@ -221,6 +222,7 @@ struct dp_parser {
 	bool no_aux_switch;
 	bool dsc_feature_enable;
 	bool fec_feature_enable;
+	bool dsc_continuous_pps;
 	bool has_widebus;
 	bool gpio_aux_switch;
 	bool lphw_hpd;

+ 21 - 9
msm/dsi/dsi_ctrl.c

@@ -1319,17 +1319,16 @@ static void dsi_configure_command_scheduling(struct dsi_ctrl *dsi_ctrl,
 	}
 
 	/*
-	 * In case of command scheduling in command mode, the window size
-	 * is reset to zero, if the total scheduling window is greater
-	 * than the panel height.
+	 * For command scheduling in command mode, program the maximum
+	 * possible DMA start window when the panel defines no schedule
+	 * line or window size properties.
 	 */
 	if ((dsi_ctrl->host_config.panel_mode == DSI_OP_CMD_MODE) &&
 			dsi_hw_ops.configure_cmddma_window) {
-		sched_line_no = line_no;
-
-		if ((sched_line_no + window) > timing->v_active)
-			window = 0;
 
+		sched_line_no = (line_no == 0) ? TEARCHECK_WINDOW_SIZE :
+					line_no;
+		window = (window == 0) ? timing->v_active : window;
 		sched_line_no += timing->v_active;
 
 		dsi_hw_ops.configure_cmddma_window(&dsi_ctrl->hw, cmd_mem,
@@ -1374,8 +1373,21 @@ static void dsi_kickoff_msg_tx(struct dsi_ctrl *dsi_ctrl,
 		dsi_hw_ops.reset_trig_ctrl(&dsi_ctrl->hw,
 				&dsi_ctrl->host_config.common_config);
 
-	/* check if custom dma scheduling line needed */
-	if (flags & DSI_CTRL_CMD_CUSTOM_DMA_SCHED)
+	/*
+	 * Always enable DMA scheduling for video mode panels.
+	 *
+	 * In video mode, if the DMA is triggered very close to the
+	 * beginning of the active window and the transfer happens in
+	 * the last line of VBP, the HW state moves to 'wait' and only
+	 * returns to 'idle' in the first line of VFP. If, somewhere
+	 * in the middle of the active window, SW disables the DSI
+	 * command mode engine while the HW is still waiting and
+	 * re-enables it only after the timing engine is OFF, the HW
+	 * never 'sees' another vblank line and hence it gets stuck
+	 * in the 'wait' state.
+	 */
+	if ((flags & DSI_CTRL_CMD_CUSTOM_DMA_SCHED) ||
+		(dsi_ctrl->host_config.panel_mode == DSI_OP_VIDEO_MODE))
 		dsi_configure_command_scheduling(dsi_ctrl, cmd_mem);
 
 	dsi_ctrl->cmd_mode = (dsi_ctrl->host_config.panel_mode ==
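
With this change, command-mode panels that do not specify a DMA schedule line or start-window size fall back to TEARCHECK_WINDOW_SIZE and the full active height instead of having the window zeroed, and the schedule line keeps its v_active offset. A standalone sketch of just that arithmetic; the panel values in main() are illustrative, the real ones come from the panel timing and device tree.

#include <stdint.h>
#include <stdio.h>

#define TEARCHECK_WINDOW_SIZE	5	/* default added in dsi_ctrl.h above */

struct dma_sched {
	uint32_t line_no;	/* schedule line programmed into HW */
	uint32_t window;	/* DMA start-window size in lines */
};

/* Mirrors the command-mode branch of dsi_configure_command_scheduling():
 * apply the defaults when the panel provides no schedule-line/window
 * properties, then add the v_active offset as the existing code does. */
static struct dma_sched cmd_dma_sched(uint32_t line_no, uint32_t window,
				      uint32_t v_active)
{
	struct dma_sched s;

	s.line_no = (line_no == 0) ? TEARCHECK_WINDOW_SIZE : line_no;
	s.window = (window == 0) ? v_active : window;
	s.line_no += v_active;

	return s;
}

int main(void)
{
	/* 1080-line panel with no schedule properties in DT */
	struct dma_sched s = cmd_dma_sched(0, 0, 1080);

	printf("line_no=%u window=%u\n", s.line_no, s.window);	/* 1085 1080 */
	return 0;
}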

+ 3 - 0
msm/dsi/dsi_ctrl.h

@@ -53,6 +53,9 @@
 /* max size supported for dsi cmd transfer using TPG */
 #define DSI_CTRL_MAX_CMD_FIFO_STORE_SIZE 64
 
+/* Default tearcheck window size as programmed by MDP */
+#define TEARCHECK_WINDOW_SIZE	5
+
 /**
  * enum dsi_power_state - defines power states for dsi controller.
  * @DSI_CTRL_POWER_VREG_OFF:    Digital and analog supplies for DSI controller

+ 0 - 3
msm/dsi/dsi_ctrl_hw_2_2.c

@@ -202,9 +202,6 @@ void dsi_ctrl_hw_22_configure_cmddma_window(struct dsi_ctrl_hw *ctrl,
 {
 	u32 reg = 0;
 
-	if (!window)
-		return;
-
 	if (cmd->en_broadcast) {
 		reg = DSI_R32(ctrl, DSI_TRIG_CTRL);
 		if (cmd->is_master) {

+ 8 - 4
msm/dsi/dsi_display.c

@@ -995,7 +995,7 @@ static int dsi_display_cmd_rx(struct dsi_display *display,
 	flags |= (DSI_CTRL_CMD_FETCH_MEMORY | DSI_CTRL_CMD_READ);
 	if ((m_ctrl->ctrl->host_config.panel_mode == DSI_OP_VIDEO_MODE) ||
 			((cmd->msg.flags & MIPI_DSI_MSG_CMD_DMA_SCHED) &&
-			 (display->panel->panel_initialized)))
+			 (display->enabled)))
 		flags |= DSI_CTRL_CMD_CUSTOM_DMA_SCHED;
 
 	rc = dsi_ctrl_cmd_transfer(m_ctrl->ctrl, &cmd->msg, &flags);
@@ -3084,8 +3084,12 @@ static int dsi_display_broadcast_cmd(struct dsi_display *display,
 		m_flags |= DSI_CTRL_CMD_LAST_COMMAND;
 	}
 
-	if ((msg->flags & MIPI_DSI_MSG_CMD_DMA_SCHED) &&
-				(display->panel->panel_initialized)) {
+	/*
+	 * DMA scheduling is always recommended for broadcast commands.
+	 * As long as the display is enabled and TE is running, the
+	 * DSI_CTRL_CMD_CUSTOM_DMA_SCHED flag should be set.
+	 */
+	if (display->enabled) {
 		flags |= DSI_CTRL_CMD_CUSTOM_DMA_SCHED;
 		m_flags |= DSI_CTRL_CMD_CUSTOM_DMA_SCHED;
 	}
@@ -3261,7 +3265,7 @@ static ssize_t dsi_host_transfer(struct mipi_dsi_host *host,
 			cmd_flags |= DSI_CTRL_CMD_ASYNC_WAIT;
 
 		if ((msg->flags & MIPI_DSI_MSG_CMD_DMA_SCHED) &&
-				(display->panel->panel_initialized))
+				(display->enabled))
 			cmd_flags |= DSI_CTRL_CMD_CUSTOM_DMA_SCHED;
 
 		rc = dsi_ctrl_cmd_transfer(display->ctrl[ctrl_idx].ctrl, msg,

+ 3 - 0
msm/dsi/dsi_display.h

@@ -194,6 +194,7 @@ struct dsi_display_ext_bridge {
  *                    Set to false, otherwise.
  * @tx_cmd_buf_ndx:   Index to the DSI debugfs TX CMD buffer.
  * @cmd_set:	      Debugfs TX cmd set.
+ * @enabled:	      Boolean to indicate display enabled.
  */
 struct dsi_display {
 	struct platform_device *pdev;
@@ -292,6 +293,8 @@ struct dsi_display {
 
 	int tx_cmd_buf_ndx;
 	struct dsi_panel_cmd_set cmd_set;
+
+	bool enabled;
 };
 
 int dsi_display_dev_probe(struct platform_device *pdev);

+ 9 - 0
msm/dsi/dsi_drm.c

@@ -246,6 +246,9 @@ static void dsi_bridge_enable(struct drm_bridge *bridge)
 		DSI_ERR("[%d] DSI display post enabled failed, rc=%d\n",
 		       c_bridge->id, rc);
 
+	if (display)
+		display->enabled = true;
+
 	if (display && display->drm_conn) {
 		sde_connector_helper_bridge_enable(display->drm_conn);
 		if (c_bridge->dsi_mode.dsi_mode_flags & DSI_MODE_FLAG_POMS)
@@ -269,6 +272,9 @@ static void dsi_bridge_disable(struct drm_bridge *bridge)
 	private_flags =
 		bridge->encoder->crtc->state->adjusted_mode.private_flags;
 
+	if (display)
+		display->enabled = false;
+
 	if (display && display->drm_conn) {
 		display->poms_pending =
 			private_flags & MSM_MODE_FLAG_SEAMLESS_POMS;
@@ -1088,6 +1094,9 @@ int dsi_conn_post_kickoff(struct drm_connector *connector,
 			dsi_ctrl_setup_avr(display->ctrl[i].ctrl, enable);
 	}
 
+	if (display->drm_conn)
+		sde_connector_helper_post_kickoff(display->drm_conn);
+
 	return 0;
 }
 

+ 145 - 10
msm/dsi/dsi_pll_5nm.c

@@ -1034,7 +1034,8 @@ static void shadow_dsi_pll_dynamic_refresh_5nm(struct dsi_pll_5nm *pll,
 	DSI_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL6,
 			   (PLL_CMODE_1 + offset),
 			   (PLL_CLOCK_INVERTERS_1 + offset),
-			   0x10, reg->pll_clock_inverters);
+			   pll->cphy_enabled ? 0x00 : 0x10,
+			   reg->pll_clock_inverters);
 	upper_addr |=
 		(upper_8_bit(PLL_CMODE_1 + offset) << 12);
 	upper_addr |= (upper_8_bit(PLL_CLOCK_INVERTERS_1 + offset) << 13);
@@ -1493,6 +1494,10 @@ static unsigned long vco_5nm_recalc_rate(struct clk_hw *hw,
 		return 0;
 	}
 
+	if (!pll->priv) {
+		pr_err("pll priv is null\n");
+		return 0;
+	}
 	/*
 	 * In the case when vco rate is set, the recalculation function should
 	 * return the current rate as to avoid trying to set the vco rate
@@ -1513,6 +1518,8 @@ static unsigned long vco_5nm_recalc_rate(struct clk_hw *hw,
 		return 0;
 	}
 
+	dsi_pll_detect_phy_mode(pll->priv, pll);
+
 	if (dsi_pll_5nm_lock_status(pll)) {
 		pr_err("PLL not enabled\n");
 		pll->handoff_resources = false;
@@ -1926,6 +1933,17 @@ static struct clk_fixed_factor dsi0pll_post_vco_div3_5 = {
 	},
 };
 
+static struct clk_fixed_factor dsi0pll_shadow_post_vco_div3_5 = {
+	.div = 7,
+	.mult = 2,
+	.hw.init = &(struct clk_init_data){
+		.name = "dsi0pll_shadow_post_vco_div3_5",
+		.parent_names = (const char *[]){"dsi0pll_shadow_pll_out_div"},
+		.num_parents = 1,
+		.ops = &clk_fixed_factor_ops,
+	},
+};
+
 static struct clk_fixed_factor dsi1pll_post_vco_div3_5 = {
 	.div = 7,
 	.mult = 2,
@@ -1937,6 +1955,17 @@ static struct clk_fixed_factor dsi1pll_post_vco_div3_5 = {
 	},
 };
 
+static struct clk_fixed_factor dsi1pll_shadow_post_vco_div3_5 = {
+	.div = 7,
+	.mult = 2,
+	.hw.init = &(struct clk_init_data){
+		.name = "dsi1pll_shadow_post_vco_div3_5",
+		.parent_names = (const char *[]){"dsi1pll_shadow_pll_out_div"},
+		.num_parents = 1,
+		.ops = &clk_fixed_factor_ops,
+	},
+};
+
 static struct clk_fixed_factor dsi1pll_shadow_post_vco_div = {
 	.div = 4,
 	.mult = 1,
@@ -1996,6 +2025,18 @@ static struct clk_fixed_factor dsi0pll_cphy_byteclk_src = {
 	},
 };
 
+static struct clk_fixed_factor dsi0pll_shadow_cphy_byteclk_src = {
+	.div = 7,
+	.mult = 1,
+	.hw.init = &(struct clk_init_data){
+		.name = "dsi0pll_shadow_cphy_byteclk_src",
+		.parent_names = (const char *[]){"dsi0pll_shadow_bitclk_src"},
+		.num_parents = 1,
+		.flags = CLK_SET_RATE_PARENT,
+		.ops = &clk_fixed_factor_ops,
+	},
+};
+
 static struct clk_fixed_factor dsi1pll_cphy_byteclk_src = {
 	.div = 7,
 	.mult = 1,
@@ -2008,6 +2049,18 @@ static struct clk_fixed_factor dsi1pll_cphy_byteclk_src = {
 	},
 };
 
+static struct clk_fixed_factor dsi1pll_shadow_cphy_byteclk_src = {
+	.div = 7,
+	.mult = 1,
+	.hw.init = &(struct clk_init_data){
+		.name = "dsi1pll_shadow_cphy_byteclk_src",
+		.parent_names = (const char *[]){"dsi1pll_shadow_bitclk_src"},
+		.num_parents = 1,
+		.flags = CLK_SET_RATE_PARENT,
+		.ops = &clk_fixed_factor_ops,
+	},
+};
+
 static struct clk_fixed_factor dsi1pll_shadow_byteclk_src = {
 	.div = 8,
 	.mult = 1,
@@ -2072,8 +2125,9 @@ static struct clk_regmap_mux dsi0pll_byteclk_mux = {
 			.name = "dsi0_phy_pll_out_byteclk",
 			.parent_names = (const char *[]){"dsi0pll_byteclk_src",
 				"dsi0pll_shadow_byteclk_src",
-				"dsi0pll_cphy_byteclk_src"},
-			.num_parents = 3,
+				"dsi0pll_cphy_byteclk_src",
+				"dsi0pll_shadow_cphy_byteclk_src"},
+			.num_parents = 4,
 			.flags = (CLK_SET_RATE_PARENT |
 					CLK_SET_RATE_NO_REPARENT),
 			.ops = &clk_regmap_mux_closest_ops,
@@ -2089,8 +2143,9 @@ static struct clk_regmap_mux dsi1pll_byteclk_mux = {
 			.name = "dsi1_phy_pll_out_byteclk",
 			.parent_names = (const char *[]){"dsi1pll_byteclk_src",
 				"dsi1pll_shadow_byteclk_src",
-				"dsi1pll_cphy_byteclk_src"},
-			.num_parents = 3,
+				"dsi1pll_cphy_byteclk_src",
+				"dsi1pll_shadow_cphy_byteclk_src"},
+			.num_parents = 4,
 			.flags = (CLK_SET_RATE_PARENT |
 					CLK_SET_RATE_NO_REPARENT),
 			.ops = &clk_regmap_mux_closest_ops,
@@ -2144,6 +2199,22 @@ static struct clk_regmap_mux dsi0pll_cphy_pclk_src_mux = {
 	},
 };
 
+static struct clk_regmap_mux dsi0pll_shadow_cphy_pclk_src_mux = {
+	.reg = PHY_CMN_CLK_CFG1,
+	.shift = 0,
+	.width = 2,
+	.clkr = {
+		.hw.init = &(struct clk_init_data){
+			.name = "dsi0pll_shadow_cphy_pclk_src_mux",
+			.parent_names =
+				(const char *[]){
+					"dsi0pll_shadow_post_vco_div3_5"},
+			.num_parents = 1,
+			.ops = &clk_regmap_mux_closest_ops,
+		},
+	},
+};
+
 static struct clk_regmap_mux dsi1pll_pclk_src_mux = {
 	.reg = PHY_CMN_CLK_CFG1,
 	.shift = 0,
@@ -2159,6 +2230,22 @@ static struct clk_regmap_mux dsi1pll_pclk_src_mux = {
 	},
 };
 
+static struct clk_regmap_mux dsi1pll_shadow_cphy_pclk_src_mux = {
+	.reg = PHY_CMN_CLK_CFG1,
+	.shift = 0,
+	.width = 2,
+	.clkr = {
+		.hw.init = &(struct clk_init_data){
+			.name = "dsi1pll_shadow_cphy_pclk_src_mux",
+			.parent_names =
+				(const char *[]){
+					"dsi1pll_shadow_post_vco_div3_5"},
+			.num_parents = 1,
+			.ops = &clk_regmap_mux_closest_ops,
+		},
+	},
+};
+
 static struct clk_regmap_mux dsi1pll_shadow_pclk_src_mux = {
 	.reg = PHY_CMN_CLK_CFG1,
 	.shift = 0,
@@ -2238,6 +2325,22 @@ static struct clk_regmap_div dsi0pll_cphy_pclk_src = {
 	},
 };
 
+static struct clk_regmap_div dsi0pll_shadow_cphy_pclk_src = {
+	.shift = 0,
+	.width = 4,
+	.flags = CLK_DIVIDER_ONE_BASED | CLK_DIVIDER_ALLOW_ZERO,
+	.clkr = {
+		.hw.init = &(struct clk_init_data){
+			.name = "dsi0pll_shadow_cphy_pclk_src",
+			.parent_names = (const char *[]){
+				"dsi0pll_shadow_cphy_pclk_src_mux"},
+			.num_parents = 1,
+			.flags = CLK_SET_RATE_PARENT,
+			.ops = &clk_regmap_div_ops,
+		},
+	},
+};
+
 static struct clk_regmap_div dsi1pll_pclk_src = {
 	.shift = 0,
 	.width = 4,
@@ -2286,6 +2389,22 @@ static struct clk_regmap_div dsi1pll_cphy_pclk_src = {
 	},
 };
 
+static struct clk_regmap_div dsi1pll_shadow_cphy_pclk_src = {
+	.shift = 0,
+	.width = 4,
+	.flags = CLK_DIVIDER_ONE_BASED | CLK_DIVIDER_ALLOW_ZERO,
+	.clkr = {
+		.hw.init = &(struct clk_init_data){
+			.name = "dsi1pll_shadow_cphy_pclk_src",
+			.parent_names = (const char *[]){
+				"dsi1pll_shadow_cphy_pclk_src_mux"},
+			.num_parents = 1,
+			.flags = CLK_SET_RATE_PARENT,
+			.ops = &clk_regmap_div_ops,
+		},
+	},
+};
+
 static struct clk_regmap_mux dsi0pll_pclk_mux = {
 	.shift = 0,
 	.width = 1,
@@ -2294,8 +2413,9 @@ static struct clk_regmap_mux dsi0pll_pclk_mux = {
 			.name = "dsi0_phy_pll_out_dsiclk",
 			.parent_names = (const char *[]){"dsi0pll_pclk_src",
 				"dsi0pll_shadow_pclk_src",
-				"dsi0pll_cphy_pclk_src"},
-			.num_parents = 3,
+				"dsi0pll_cphy_pclk_src",
+				"dsi0pll_shadow_cphy_pclk_src"},
+			.num_parents = 4,
 			.flags = (CLK_SET_RATE_PARENT |
 					CLK_SET_RATE_NO_REPARENT),
 			.ops = &clk_regmap_mux_closest_ops,
@@ -2311,8 +2431,9 @@ static struct clk_regmap_mux dsi1pll_pclk_mux = {
 			.name = "dsi1_phy_pll_out_dsiclk",
 			.parent_names = (const char *[]){"dsi1pll_pclk_src",
 				"dsi1pll_shadow_pclk_src",
-				"dsi1pll_cphy_pclk_src"},
-			.num_parents = 3,
+				"dsi1pll_cphy_pclk_src",
+				"dsi1pll_shadow_cphy_pclk_src"},
+			.num_parents = 4,
 			.flags = (CLK_SET_RATE_PARENT |
 					CLK_SET_RATE_NO_REPARENT),
 			.ops = &clk_regmap_mux_closest_ops,
@@ -2339,10 +2460,15 @@ static struct clk_hw *dsi_pllcc_5nm[] = {
 	[SHADOW_PLL_OUT_DIV_0_CLK] = &dsi0pll_shadow_pll_out_div.clkr.hw,
 	[SHADOW_BITCLK_SRC_0_CLK] = &dsi0pll_shadow_bitclk_src.clkr.hw,
 	[SHADOW_BYTECLK_SRC_0_CLK] = &dsi0pll_shadow_byteclk_src.hw,
+	[SHADOW_CPHY_BYTECLK_SRC_0_CLK] = &dsi0pll_shadow_cphy_byteclk_src.hw,
 	[SHADOW_POST_BIT_DIV_0_CLK] = &dsi0pll_shadow_post_bit_div.hw,
 	[SHADOW_POST_VCO_DIV_0_CLK] = &dsi0pll_shadow_post_vco_div.hw,
+	[SHADOW_POST_VCO_DIV3_5_0_CLK] = &dsi0pll_shadow_post_vco_div3_5.hw,
 	[SHADOW_PCLK_SRC_MUX_0_CLK] = &dsi0pll_shadow_pclk_src_mux.clkr.hw,
 	[SHADOW_PCLK_SRC_0_CLK] = &dsi0pll_shadow_pclk_src.clkr.hw,
+	[SHADOW_CPHY_PCLK_SRC_MUX_0_CLK] =
+			&dsi0pll_shadow_cphy_pclk_src_mux.clkr.hw,
+	[SHADOW_CPHY_PCLK_SRC_0_CLK] = &dsi0pll_shadow_cphy_pclk_src.clkr.hw,
 	[VCO_CLK_1] = &dsi1pll_vco_clk.hw,
 	[PLL_OUT_DIV_1_CLK] = &dsi1pll_pll_out_div.clkr.hw,
 	[BITCLK_SRC_1_CLK] = &dsi1pll_bitclk_src.clkr.hw,
@@ -2361,10 +2487,15 @@ static struct clk_hw *dsi_pllcc_5nm[] = {
 	[SHADOW_PLL_OUT_DIV_1_CLK] = &dsi1pll_shadow_pll_out_div.clkr.hw,
 	[SHADOW_BITCLK_SRC_1_CLK] = &dsi1pll_shadow_bitclk_src.clkr.hw,
 	[SHADOW_BYTECLK_SRC_1_CLK] = &dsi1pll_shadow_byteclk_src.hw,
+	[SHADOW_CPHY_BYTECLK_SRC_1_CLK] = &dsi1pll_shadow_cphy_byteclk_src.hw,
 	[SHADOW_POST_BIT_DIV_1_CLK] = &dsi1pll_shadow_post_bit_div.hw,
 	[SHADOW_POST_VCO_DIV_1_CLK] = &dsi1pll_shadow_post_vco_div.hw,
+	[SHADOW_POST_VCO_DIV3_5_1_CLK] = &dsi1pll_shadow_post_vco_div3_5.hw,
 	[SHADOW_PCLK_SRC_MUX_1_CLK] = &dsi1pll_shadow_pclk_src_mux.clkr.hw,
 	[SHADOW_PCLK_SRC_1_CLK] = &dsi1pll_shadow_pclk_src.clkr.hw,
+	[SHADOW_CPHY_PCLK_SRC_MUX_1_CLK] =
+			&dsi1pll_shadow_cphy_pclk_src_mux.clkr.hw,
+	[SHADOW_CPHY_PCLK_SRC_1_CLK] = &dsi1pll_shadow_cphy_pclk_src.clkr.hw,
 };
 
 int dsi_pll_clock_register_5nm(struct platform_device *pdev,
@@ -2432,6 +2563,7 @@ int dsi_pll_clock_register_5nm(struct platform_device *pdev,
 		dsi0pll_pclk_src.clkr.regmap = rmap;
 		dsi0pll_cphy_pclk_src.clkr.regmap = rmap;
 		dsi0pll_shadow_pclk_src.clkr.regmap = rmap;
+		dsi0pll_shadow_cphy_pclk_src.clkr.regmap = rmap;
 
 		rmap_config->name = "pclk_mux";
 		rmap = devm_regmap_init(&pdev->dev, &dsi_mux_regmap_bus,
@@ -2449,6 +2581,7 @@ int dsi_pll_clock_register_5nm(struct platform_device *pdev,
 				&cphy_pclk_src_mux_regmap_bus,
 				pll_res, rmap_config);
 		dsi0pll_cphy_pclk_src_mux.clkr.regmap = rmap;
+		dsi0pll_shadow_cphy_pclk_src_mux.clkr.regmap = rmap;
 
 		rmap_config->name = "byteclk_mux";
 		rmap = devm_regmap_init(&pdev->dev, &dsi_mux_regmap_bus,
@@ -2465,7 +2598,7 @@ int dsi_pll_clock_register_5nm(struct platform_device *pdev,
 			dsi0pll_shadow_vco_clk.max_rate = 5000000000;
 		}
 
-		for (i = VCO_CLK_0; i <= CPHY_PCLK_SRC_0_CLK; i++) {
+		for (i = VCO_CLK_0; i <= SHADOW_CPHY_PCLK_SRC_0_CLK; i++) {
 			clk = devm_clk_register(&pdev->dev,
 						dsi_pllcc_5nm[i]);
 			if (IS_ERR(clk)) {
@@ -2499,6 +2632,7 @@ int dsi_pll_clock_register_5nm(struct platform_device *pdev,
 		dsi1pll_pclk_src.clkr.regmap = rmap;
 		dsi1pll_cphy_pclk_src.clkr.regmap = rmap;
 		dsi1pll_shadow_pclk_src.clkr.regmap = rmap;
+		dsi1pll_shadow_cphy_pclk_src.clkr.regmap = rmap;
 
 		rmap_config->name = "pclk_mux";
 		rmap = devm_regmap_init(&pdev->dev, &dsi_mux_regmap_bus,
@@ -2510,6 +2644,7 @@ int dsi_pll_clock_register_5nm(struct platform_device *pdev,
 				pll_res, rmap_config);
 		dsi1pll_pclk_src_mux.clkr.regmap = rmap;
 		dsi1pll_shadow_pclk_src_mux.clkr.regmap = rmap;
+		dsi1pll_shadow_cphy_pclk_src_mux.clkr.regmap = rmap;
 
 		rmap_config->name = "cphy_pclk_src_mux";
 		rmap = devm_regmap_init(&pdev->dev,
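
The shadow C-PHY clocks registered above reuse the factors of the existing C-PHY tree: the byte clock is a fixed 1/7 of the bit clock, and the 3.5 pixel-clock divider is expressed through clk_fixed_factor as div = 7, mult = 2, which is how a fixed /3.5 ratio is written with integer mult/div in the common clock framework. A quick arithmetic sketch of those two ratios; the input rates are illustrative only, the real ones come from the PLL configuration.

#include <stdio.h>

int main(void)
{
	unsigned long long bitclk = 1500000000ULL;	/* shadow_bitclk_src */
	unsigned long long pll_out = 1500000000ULL;	/* shadow_pll_out_div */

	/* dsi*pll_shadow_cphy_byteclk_src: fixed factor 1/7 of the bit clock */
	unsigned long long cphy_byteclk = bitclk / 7;

	/* dsi*pll_shadow_post_vco_div3_5: div = 7, mult = 2, i.e. parent / 3.5 */
	unsigned long long post_vco_div3_5 = pll_out * 2 / 7;

	printf("shadow cphy byteclk : %llu Hz\n", cphy_byteclk);
	printf("shadow post_vco/3.5 : %llu Hz\n", post_vco_div3_5);
	return 0;
}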

+ 14 - 5
msm/sde/sde_connector.c

@@ -945,6 +945,17 @@ void sde_connector_helper_bridge_disable(struct drm_connector *connector)
 }
 
 void sde_connector_helper_bridge_enable(struct drm_connector *connector)
+{
+	struct sde_connector *c_conn = NULL;
+
+	if (!connector)
+		return;
+
+	c_conn = to_sde_connector(connector);
+	c_conn->panel_dead = false;
+}
+
+void sde_connector_helper_post_kickoff(struct drm_connector *connector)
 {
 	struct sde_connector *c_conn = NULL;
 	struct dsi_display *display;
@@ -976,7 +987,6 @@ void sde_connector_helper_bridge_enable(struct drm_connector *connector)
 		c_conn->bl_device->props.state &= ~BL_CORE_FBBLANK;
 		backlight_update_status(c_conn->bl_device);
 	}
-	c_conn->panel_dead = false;
 }
 
 int sde_connector_clk_ctrl(struct drm_connector *connector, bool enable)
@@ -2994,8 +3004,7 @@ error_free_conn:
 	return ERR_PTR(rc);
 }
 
-static int _sde_conn_hw_recovery_handler(
-		struct drm_connector *connector, bool val)
+static int _sde_conn_enable_hw_recovery(struct drm_connector *connector)
 {
 	struct sde_connector *c_conn;
 
@@ -3006,7 +3015,7 @@ static int _sde_conn_hw_recovery_handler(
 	c_conn = to_sde_connector(connector);
 
 	if (c_conn->encoder)
-		sde_encoder_recovery_events_handler(c_conn->encoder, val);
+		sde_encoder_enable_recovery_event(c_conn->encoder);
 
 	return 0;
 }
@@ -3024,7 +3033,7 @@ int sde_connector_register_custom_event(struct sde_kms *kms,
 		ret = 0;
 		break;
 	case DRM_EVENT_SDE_HW_RECOVERY:
-		ret = _sde_conn_hw_recovery_handler(conn_drm, val);
+		ret = _sde_conn_enable_hw_recovery(conn_drm);
 		break;
 	default:
 		break;

+ 6 - 0
msm/sde/sde_connector.h

@@ -1086,4 +1086,10 @@ int sde_connector_get_panel_vfp(struct drm_connector *connector,
  */
 int sde_connector_esd_status(struct drm_connector *connector);
 
+/**
+ * sde_connector_helper_post_kickoff - helper function for drm connector post kickoff
+ * @connector: Pointer to DRM connector object
+ */
+void sde_connector_helper_post_kickoff(struct drm_connector *connector);
+
 #endif /* _SDE_CONNECTOR_H_ */

+ 3 - 3
msm/sde/sde_crtc.c

@@ -3504,7 +3504,7 @@ static void sde_crtc_destroy_state(struct drm_crtc *crtc,
 			&cstate->property_state);
 }
 
-static int _sde_crtc_flush_event_thread(struct drm_crtc *crtc)
+static int _sde_crtc_flush_frame_events(struct drm_crtc *crtc)
 {
 	struct sde_crtc *sde_crtc;
 	int i;
@@ -3780,7 +3780,7 @@ void sde_crtc_commit_kickoff(struct drm_crtc *crtc,
 
 	sde_crtc_calc_fps(sde_crtc);
 	SDE_ATRACE_BEGIN("flush_event_thread");
-	_sde_crtc_flush_event_thread(crtc);
+	_sde_crtc_flush_frame_events(crtc);
 	SDE_ATRACE_END("flush_event_thread");
 	sde_crtc->plane_mask_old = crtc->state->plane_mask;
 
@@ -4162,7 +4162,7 @@ static void sde_crtc_disable(struct drm_crtc *crtc)
 			(u8 *)&power_on);
 
 	mutex_unlock(&sde_crtc->crtc_lock);
-	_sde_crtc_flush_event_thread(crtc);
+	kthread_flush_worker(&priv->event_thread[crtc->index].worker);
 	mutex_lock(&sde_crtc->crtc_lock);
 
 	kthread_cancel_delayed_work_sync(&sde_crtc->static_cache_read_work);

+ 9 - 6
msm/sde/sde_encoder.c

@@ -1006,8 +1006,11 @@ static int sde_encoder_virt_atomic_check(
 				CONNECTOR_PROP_QSYNC_MODE);
 
 	if (has_modeset && qsync_dirty &&
-		!msm_is_mode_seamless_vrr(adj_mode)) {
-		SDE_ERROR("invalid qsync update during modeset\n");
+		(msm_is_mode_seamless_poms(adj_mode) ||
+		msm_is_mode_seamless_dms(adj_mode) ||
+		msm_is_mode_seamless_dyn_clk(adj_mode))) {
+		SDE_ERROR("invalid qsync update during modeset priv flag:%x\n",
+			adj_mode->private_flags);
 		return -EINVAL;
 	}
 
@@ -2551,7 +2554,8 @@ static void _sde_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
 					sde_enc->cur_master->hw_mdptop);
 
 	if (sde_enc->cur_master->hw_mdptop &&
-			sde_enc->cur_master->hw_mdptop->ops.reset_ubwc)
+			sde_enc->cur_master->hw_mdptop->ops.reset_ubwc &&
+			!sde_in_trusted_vm(sde_kms))
 		sde_enc->cur_master->hw_mdptop->ops.reset_ubwc(
 				sde_enc->cur_master->hw_mdptop,
 				sde_kms->catalog);
@@ -5422,8 +5426,7 @@ bool sde_encoder_recovery_events_enabled(struct drm_encoder *encoder)
 	return sde_enc->recovery_events_enabled;
 }
 
-void sde_encoder_recovery_events_handler(struct drm_encoder *encoder,
-		bool enabled)
+void sde_encoder_enable_recovery_event(struct drm_encoder *encoder)
 {
 	struct sde_encoder_virt *sde_enc;
 
@@ -5433,5 +5436,5 @@ void sde_encoder_recovery_events_handler(struct drm_encoder *encoder,
 	}
 
 	sde_enc = to_sde_encoder_virt(encoder);
-	sde_enc->recovery_events_enabled = enabled;
+	sde_enc->recovery_events_enabled = true;
 }

+ 3 - 4
msm/sde/sde_encoder.h

@@ -486,12 +486,11 @@ int sde_encoder_display_failure_notification(struct drm_encoder *enc,
 bool sde_encoder_recovery_events_enabled(struct drm_encoder *encoder);
 
 /**
- * sde_encoder_recovery_events_handler - handler to enable/disable the
- * sw recovery for this connector
+ * sde_encoder_enable_recovery_event - handler to enable the sw recovery
+ * for this connector
  * @drm_enc:    Pointer to drm encoder structure
  */
-void sde_encoder_recovery_events_handler(struct drm_encoder *encoder,
-		bool val);
+void sde_encoder_enable_recovery_event(struct drm_encoder *encoder);
 /**
  * sde_encoder_in_clone_mode - checks if underlying phys encoder is in clone
  *	mode or independent display mode. ref@ WB in Concurrent writeback mode.

+ 1 - 0
msm/sde/sde_encoder_phys_cmd.c

@@ -1345,6 +1345,7 @@ static void sde_encoder_phys_cmd_disable(struct sde_encoder_phys *phys_enc)
 		else if (phys_enc->hw_pp->ops.enable_tearcheck)
 			phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp,
 					false);
+		sde_encoder_helper_phys_disable(phys_enc, NULL);
 	}
 
 	phys_enc->enable_state = SDE_ENC_DISABLED;

+ 11 - 5
msm/sde/sde_encoder_phys_vid.c

@@ -205,6 +205,7 @@ static u32 programmable_fetch_get_num_lines(
 		const struct intf_timing_params *timing)
 {
 	struct sde_encoder_phys *phys_enc = &vid_enc->base;
+	struct sde_mdss_cfg *m;
 
 	u32 needed_prefill_lines, needed_vfp_lines, actual_vfp_lines;
 	const u32 fixed_prefill_fps = DEFAULT_FPS;
@@ -213,18 +214,23 @@ static u32 programmable_fetch_get_num_lines(
 	u32 start_of_frame_lines =
 	    timing->v_back_porch + timing->vsync_pulse_width;
 	u32 v_front_porch = timing->v_front_porch;
+	u32 vrefresh, max_fps;
+
+	m = phys_enc->sde_kms->catalog;
+	max_fps = sde_encoder_get_dfps_maxfps(phys_enc->parent);
+	vrefresh = (max_fps > timing->vrefresh) ? max_fps : timing->vrefresh;
 
 	/* minimum prefill lines are defined based on 60fps */
-	needed_prefill_lines = (timing->vrefresh > fixed_prefill_fps) ?
-		((default_prefill_lines * timing->vrefresh) /
+	needed_prefill_lines = (vrefresh > fixed_prefill_fps) ?
+		((default_prefill_lines * vrefresh) /
 			fixed_prefill_fps) : default_prefill_lines;
 	needed_vfp_lines = needed_prefill_lines - start_of_frame_lines;
 
 	/* Fetch must be outside active lines, otherwise undefined. */
 	if (start_of_frame_lines >= needed_prefill_lines) {
 		SDE_DEBUG_VIDENC(vid_enc,
-				"prog fetch is not needed, large vbp+vsw\n");
-		actual_vfp_lines = 0;
+				"prog fetch always enabled case\n");
+		actual_vfp_lines = (m->delay_prg_fetch_start) ? 2 : 1;
 	} else if (v_front_porch < needed_vfp_lines) {
 		/* Warn fetch needed, but not enough porch in panel config */
 		pr_warn_once
@@ -239,7 +245,7 @@ static u32 programmable_fetch_get_num_lines(
 
 	SDE_DEBUG_VIDENC(vid_enc,
 		"vrefresh:%u v_front_porch:%u v_back_porch:%u vsync_pulse_width:%u\n",
-		timing->vrefresh, v_front_porch, timing->v_back_porch,
+		vrefresh, v_front_porch, timing->v_back_porch,
 		timing->vsync_pulse_width);
 	SDE_DEBUG_VIDENC(vid_enc,
 		"prefill_lines:%u needed_vfp_lines:%u actual_vfp_lines:%u\n",

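programmable_fetch_get_num_lines() now scales the minimum prefill lines (defined at 60 fps) by the highest refresh rate the display can reach, and keeps programmable fetch enabled with a one- or two-line fetch even when VBP + VSW already covers the prefill requirement. A simplified sketch of that math, omitting the clamp against the actual front porch; the catalog and timing values in main() are illustrative only.

#include <stdint.h>
#include <stdio.h>

#define DEFAULT_FPS 60	/* fixed_prefill_fps in the code above */

static uint32_t prog_fetch_vfp_lines(uint32_t default_prefill_lines,
				     uint32_t vrefresh, uint32_t max_fps,
				     uint32_t v_back_porch,
				     uint32_t vsync_pulse_width,
				     int delay_prg_fetch_start)
{
	uint32_t start_of_frame_lines = v_back_porch + vsync_pulse_width;
	uint32_t needed_prefill_lines;

	/* use the highest rate the panel can run at, not the current mode */
	if (max_fps > vrefresh)
		vrefresh = max_fps;

	/* minimum prefill lines are defined at 60 fps, scale them up */
	needed_prefill_lines = (vrefresh > DEFAULT_FPS) ?
		(default_prefill_lines * vrefresh) / DEFAULT_FPS :
		default_prefill_lines;

	/* large VBP + VSW: fetch stays enabled, one or two lines into VFP */
	if (start_of_frame_lines >= needed_prefill_lines)
		return delay_prg_fetch_start ? 2 : 1;

	return needed_prefill_lines - start_of_frame_lines;
}

int main(void)
{
	/* 120 Hz-capable panel, 25 default prefill lines, VBP + VSW = 12 */
	printf("vfp lines: %u\n",
	       prog_fetch_vfp_lines(25, 60, 120, 8, 4, 0));	/* 38 */
	return 0;
}
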
+ 2 - 1
msm/sde/sde_hw_color_proc_v4.c

@@ -283,7 +283,8 @@ void sde_setup_dspp_ltm_threshv1(struct sde_hw_dspp *ctx, void *cfg)
 	}
 
 	if (!hw_cfg->payload) {
-		DRM_ERROR("invalid payload parameters for ltm thresh param\n");
+		DRM_DEBUG_DRIVER("Disable LTM noise thresh feature\n");
+		SDE_REG_WRITE(&ctx->hw, ctx->cap->sblk->ltm.base + 0x60, 0);
 		return;
 	}
 

+ 1 - 0
msm/sde/sde_kms.c

@@ -1740,6 +1740,7 @@ static int _sde_kms_setup_displays(struct drm_device *dev,
 		.install_properties = NULL,
 		.set_allowed_mode_switch = dsi_conn_set_allowed_mode_switch,
 		.get_qsync_min_fps = dsi_display_get_qsync_min_fps,
+		.prepare_commit = dsi_conn_prepare_commit,
 	};
 	static const struct sde_connector_ops wb_ops = {
 		.post_init =    sde_wb_connector_post_init,

+ 44 - 8
msm/sde_dsc_helper.c

@@ -39,7 +39,7 @@ static u16 sde_dsc_rc_buf_thresh[DSC_NUM_BUF_RANGES - 1] =
  */
 static char sde_dsc_rc_range_min_qp[DSC_RATIO_TYPE_MAX][DSC_NUM_BUF_RANGES] = {
 	/* DSC v1.1 */
-	{0, 0, 1, 1, 3, 3, 3, 3, 3, 3, 5, 5, 5, 7, 12},
+	{0, 0, 1, 1, 3, 3, 3, 3, 3, 3, 5, 5, 5, 7, 13},
 	{0, 4, 5, 5, 7, 7, 7, 7, 7, 7, 9, 9, 9, 11, 17},
 	{0, 4, 5, 6, 7, 7, 7, 7, 7, 7, 9, 9, 9, 11, 15},
 	/* DSC v1.1 SCR and DSC v1.2 RGB 444 */
@@ -180,6 +180,36 @@ static int _get_rc_table_index(struct drm_dsc_config *dsc, int scr_ver)
 	return -EINVAL;
 }
 
+u8 _get_dsc_v1_2_bpg_offset(struct drm_dsc_config *dsc)
+{
+	u8 bpg_offset = 0;
+	u8 uncompressed_bpg_rate;
+	u8 bpp = DSC_BPP(*dsc);
+
+	if (dsc->slice_height < 8)
+		bpg_offset = 2 * (dsc->slice_height - 1);
+	else if (dsc->slice_height < 20)
+		bpg_offset = 12;
+	else if (dsc->slice_height <= 30)
+		bpg_offset = 13;
+	else if (dsc->slice_height < 42)
+		bpg_offset = 14;
+	else
+		bpg_offset = 15;
+
+	if (dsc->native_422)
+		uncompressed_bpg_rate = 3 * bpp * 4;
+	else if (dsc->native_420)
+		uncompressed_bpg_rate = 3 * bpp;
+	else
+		uncompressed_bpg_rate = (3 * bpp + 2) * 3;
+
+	if (bpg_offset < (uncompressed_bpg_rate - (3 * bpp)))
+		return bpg_offset;
+	else
+		return (uncompressed_bpg_rate - (3 * bpp));
+}
+
 int sde_dsc_populate_dsc_config(struct drm_dsc_config *dsc, int scr_ver) {
 	int bpp, bpc;
 	int groups_per_line, groups_total;
@@ -193,12 +223,15 @@ int sde_dsc_populate_dsc_config(struct drm_dsc_config *dsc, int scr_ver) {
 
 	dsc->rc_model_size = 8192;
 
-	if (dsc->dsc_version_major == 0x1 && ((dsc->dsc_version_minor == 0x1 &&
-			scr_ver == 0x1) ||
-			(dsc->dsc_version_minor == 0x2)))
-		dsc->first_line_bpg_offset = 15;
-	else
-		dsc->first_line_bpg_offset = 12;
+	if ((dsc->dsc_version_major == 0x1) &&
+			(dsc->dsc_version_minor == 0x1)) {
+		if (scr_ver == 0x1)
+			dsc->first_line_bpg_offset = 15;
+		else
+			dsc->first_line_bpg_offset = 12;
+	} else if (dsc->dsc_version_minor == 0x2) {
+		dsc->first_line_bpg_offset = _get_dsc_v1_2_bpg_offset(dsc);
+	}
 
 	dsc->rc_edge_factor = 6;
 	dsc->rc_tgt_offset_high = 3;
@@ -499,7 +532,10 @@ int sde_dsc_create_pps_buf_cmd(struct msm_display_dsc_info *dsc_info,
 	}
 
 	if (dsc->dsc_version_minor == 0x2) {
-		data = dsc->native_422 | dsc->native_420 << 1;
+		if (dsc->native_422)
+			data = BIT(0);
+		else if (dsc->native_420)
+			data = BIT(1);
 		*bp++ = data;				/* pps88 */
 		*bp++ = dsc->second_line_bpg_offset;	/* pps89 */
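
_get_dsc_v1_2_bpg_offset() replaces the fixed first_line_bpg_offset of 15 for DSC 1.2: the offset is bucketed by slice height and then clamped so it never exceeds the bits-per-group headroom of an uncompressed first line. A standalone restatement with a worked example, using plain parameters in place of struct drm_dsc_config (bpp here is the integer bits-per-pixel that DSC_BPP() extracts).

#include <stdint.h>
#include <stdio.h>

static uint8_t dsc_v1_2_first_line_bpg_offset(uint32_t slice_height,
					      uint8_t bpp, int native_422,
					      int native_420)
{
	uint8_t bpg_offset;
	uint8_t uncompressed_bpg_rate;

	/* offset grows with slice height, saturating at 15 */
	if (slice_height < 8)
		bpg_offset = 2 * (slice_height - 1);
	else if (slice_height < 20)
		bpg_offset = 12;
	else if (slice_height <= 30)
		bpg_offset = 13;
	else if (slice_height < 42)
		bpg_offset = 14;
	else
		bpg_offset = 15;

	/* bits-per-group of an uncompressed line, per chroma format */
	if (native_422)
		uncompressed_bpg_rate = 3 * bpp * 4;
	else if (native_420)
		uncompressed_bpg_rate = 3 * bpp;
	else
		uncompressed_bpg_rate = (3 * bpp + 2) * 3;

	/* never hand out more extra bits than the uncompressed headroom */
	if (bpg_offset < (uncompressed_bpg_rate - (3 * bpp)))
		return bpg_offset;

	return uncompressed_bpg_rate - (3 * bpp);
}

int main(void)
{
	/* 4:4:4, 8 bpp, 40-line slices: the bucket gives 14, and the clamp
	 * (3*8+2)*3 - 3*8 = 54 leaves it untouched. */
	printf("%d\n", dsc_v1_2_first_line_bpg_offset(40, 8, 0, 0));
	return 0;
}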
 

+ 2 - 2
msm/sde_hdcp_2x.c

@@ -478,14 +478,14 @@ static void sde_hdcp_2x_msg_sent(struct sde_hdcp_2x_ctrl *hdcp)
 		HDCP_TRANSPORT_CMD_INVALID,
 		hdcp->client_data};
 
-	SDE_EVT32_EXTERNAL(SDE_EVTLOG_FUNC_ENTRY, hdcp->authenticated,
-					hdcp->app_data.response.data[0]);
+	SDE_EVT32_EXTERNAL(SDE_EVTLOG_FUNC_ENTRY, hdcp->authenticated);
 
 	if (atomic_read(&hdcp->hdcp_off)) {
 		pr_debug("invalid state, hdcp off\n");
 		goto exit;
 	}
 
+	SDE_EVT32_EXTERNAL(hdcp->app_data.response.data[0]);
 	switch (hdcp->app_data.response.data[0]) {
 	case SKE_SEND_TYPE_ID:
 		if (!hdcp2_app_comm(hdcp->hdcp2_ctx,