Merge remote-tracking branch 'quic/display-kernel.lnx.5.10' into display-kernel.lnx.5.15
* quic/display-kernel.lnx.5.10:
  disp: msm: sde: avoid error during fal10_veto override enablement
  disp: msm: update copyright description
  disp: msm: sde: configure dest_scaler op_mode for two independent displays
  disp: msm: dp: updated copyright set for 4nm target
  Revert "disp: msm: sde: consider max of actual and default prefill lines"
  disp: msm: sde: Reset backlight scale when HWC is stopped
  disp: msm: dp: avoid duplicate read of link status
  disp: msm: dsi: update vreg_ctrl settings for cape
  disp: msm: fail commit if drm_gem_obj was found attached to a sec CB
  disp: msm: dp: updated register values for 4nm target
  disp: msm: sde: update framedata event handling
  disp: msm: dsi: Add new phy comaptible string for cape
  disp: msm: sde: software override for fal10 in cwb enable
  disp: msm: update cleanup during bind failure in msm_drm_component_init
  disp: msm: sde: dump user input_fence info on spec fence timeout
  disp: msm: sde: add null pointer check for encoder current master
  disp: msm: dsi: enable DMA start window scheduling for broadcast commands
  disp: msm: sde: avoid alignment checks for linear formats
  disp: msm: reset thread priority work on every new run
  disp: msm: sde: send power on event for cont. splash
  disp: msm: sde: always set CTL_x_UIDLE_ACTIVE register to "1"
  disp: msm: use vzalloc for large allocations
  disp: msm: sde: Add support to limit DSC size to 10k
  disp: msm: sde: add tx wait during DMS for sim panel
  disp: msm: dsi: add check for any queued DSI CMDs before clock force update
  disp: msm: sde: correct pp block allocation during dcwb dither programming
  disp: msm: sde: avoid setting of max vblank count
  disp: msm: sde: add cached lut flag in sde plane
  disp: msm: sde: avoid use after free in msm_lastclose
  disp: msm: sde: update TEAR_SYNC_WRCOUNT register before vsync counter
  disp: msm: dsi: Support uncompressed rgb101010 format
  disp: msm: sde: update idle_pc_enabled flag for all encoders
  disp: msm: sde: flush esd work before disabling the encoder
  disp: msm: sde: allow qsync update along with modeset
  disp: msm: dp: avoid dp sw reset on disconnect path
  disp: msm: sde: consider max of actual and default prefill lines
  disp: msm: ensure vbif debugbus not in use is disabled
  disp: msm: sde: update cached encoder mask if required
  disp: msm: sde: while timing engine enabling poll for active region
  disp: msm: enable cache flag for dumb buffer
  disp: msm: sde: disable ot limit for cwb
  disp: msm: sde: avoid race condition at vm release
  disp: msm: dsi: set qsync min fps list length to zero
  disp: msm: sde: reset mixers in crtc when ctl datapath switches
  disp: msm: sde: update vm state atomic check for non-primary usecases
  disp: msm: sde: reset CTL_UIDLE_ACTIVE register only if uidle is disabled

Change-Id: If480e7f33743eb4788549f853ba05e744ecb38d3
Signed-off-by: Narendra Muppalla <quic_nmuppall@quicinc.com>
@@ -142,7 +142,8 @@ void sde_encoder_uidle_enable(struct drm_encoder *drm_enc, bool enable)
 		struct sde_encoder_phys *phys = sde_enc->phys_encs[i];
 
 		if (phys && phys->hw_ctl && phys->hw_ctl->ops.uidle_enable) {
-			SDE_EVT32(DRMID(drm_enc), enable);
+			if (enable)
+				SDE_EVT32(DRMID(drm_enc), enable);
 			phys->hw_ctl->ops.uidle_enable(phys->hw_ctl, enable);
 		}
 	}
@@ -219,6 +220,36 @@ ktime_t sde_encoder_calc_last_vsync_timestamp(struct drm_encoder *drm_enc)
 	return tvblank;
 }
 
+static void _sde_encoder_control_fal10_veto(struct drm_encoder *drm_enc, bool veto)
+{
+	bool clone_mode;
+	struct sde_kms *sde_kms = sde_encoder_get_kms(drm_enc);
+	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
+
+	if (sde_kms->catalog && !sde_kms->catalog->uidle_cfg.uidle_rev)
+		return;
+
+	if (!sde_kms->hw_uidle || !sde_kms->hw_uidle->ops.uidle_fal10_override) {
+		SDE_ERROR("invalid args\n");
+		return;
+	}
+
+	/*
+	 * clone mode is the only scenario where we want to enable software override
+	 * of fal10 veto.
+	 */
+	clone_mode = sde_encoder_in_clone_mode(drm_enc);
+	SDE_EVT32(DRMID(drm_enc), clone_mode, veto);
+
+	if (clone_mode && veto) {
+		sde_kms->hw_uidle->ops.uidle_fal10_override(sde_kms->hw_uidle, veto);
+		sde_enc->fal10_veto_override = true;
+	} else if (sde_enc->fal10_veto_override && !veto) {
+		sde_kms->hw_uidle->ops.uidle_fal10_override(sde_kms->hw_uidle, veto);
+		sde_enc->fal10_veto_override = false;
+	}
+}
+
 static void _sde_encoder_pm_qos_add_request(struct drm_encoder *drm_enc)
 {
 	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
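For readers connecting this hunk to the enable/reset hunks further down, the new helper implements a latched override: the hardware fal10 override is asserted only while the encoder runs in clone (CWB) mode, and the per-encoder fal10_veto_override flag ensures the override is released only by the encoder that asserted it. Below is a minimal, self-contained sketch of that latch pattern with illustrative names; it is not driver code, only the shape of the logic mirrors this diff.

#include <linux/types.h>

/* Illustrative latch, playing the role of sde_enc->fal10_veto_override. */
struct veto_latch {
	bool asserted;
};

static void control_fal10_veto_sketch(struct veto_latch *latch, bool clone_mode,
				      bool veto, void (*hw_override)(bool enable))
{
	if (clone_mode && veto) {
		/* assert only for clone-mode (CWB) encoders */
		hw_override(true);
		latch->asserted = true;
	} else if (latch->asserted && !veto) {
		/* release only what this encoder itself asserted */
		hw_override(false);
		latch->asserted = false;
	}
}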
@@ -1132,10 +1163,8 @@ static int _sde_encoder_atomic_check_qsync(struct sde_connector *sde_conn,
 	qsync_dirty = msm_property_is_dirty(&sde_conn->property_info,
 			&sde_conn_state->property_state, CONNECTOR_PROP_QSYNC_MODE);
 
-	if (has_modeset && qsync_dirty &&
-			(msm_is_mode_seamless_poms(&sde_conn_state->msm_mode) ||
-			msm_is_mode_seamless_dms(&sde_conn_state->msm_mode) ||
-			msm_is_mode_seamless_dyn_clk(&sde_conn_state->msm_mode))) {
+	if (has_modeset && qsync_dirty && (msm_is_mode_seamless_poms(&sde_conn_state->msm_mode) ||
+			msm_is_mode_seamless_dyn_clk(&sde_conn_state->msm_mode))) {
 		SDE_ERROR("invalid qsync update during modeset priv flag:%x\n",
 				sde_conn_state->msm_mode.private_flags);
 		return -EINVAL;
@@ -2451,17 +2480,37 @@ static void _sde_encoder_virt_populate_hw_res(struct drm_encoder *drm_enc)
 }
 
 static int sde_encoder_virt_modeset_rc(struct drm_encoder *drm_enc,
-		struct msm_display_mode *msm_mode, bool pre_modeset)
+		struct drm_display_mode *adj_mode, struct msm_display_mode *msm_mode, bool pre_modeset)
 {
 	struct sde_encoder_virt *sde_enc = to_sde_encoder_virt(drm_enc);
 	enum sde_intf_mode intf_mode;
+	struct drm_display_mode *old_adj_mode = NULL;
 	int ret;
-	bool is_cmd_mode = false;
+	bool is_cmd_mode = false, res_switch = false;
 
 	if (sde_encoder_check_curr_mode(drm_enc, MSM_DISPLAY_CMD_MODE))
 		is_cmd_mode = true;
 
 	if (pre_modeset) {
+		if (sde_enc->cur_master)
+			old_adj_mode = &sde_enc->cur_master->cached_mode;
+		if (old_adj_mode && is_cmd_mode)
+			res_switch = !drm_mode_match(old_adj_mode, adj_mode,
+					DRM_MODE_MATCH_TIMINGS);
+
+		if (res_switch && sde_enc->disp_info.is_te_using_watchdog_timer) {
+			/*
+			 * add tx wait for sim panel to avoid wd timer getting
+			 * updated in middle of frame to avoid early vsync
+			 */
+			ret = sde_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);
+			if (ret && ret != -EWOULDBLOCK) {
+				SDE_ERROR_ENC(sde_enc, "wait for idle failed %d\n", ret);
+				SDE_EVT32(DRMID(drm_enc), ret, SDE_EVTLOG_ERROR);
+				return ret;
+			}
+		}
+
 		intf_mode = sde_encoder_get_intf_mode(drm_enc);
 		if (msm_is_mode_seamless_dms(msm_mode) ||
 				(msm_is_mode_seamless_dyn_clk(msm_mode) &&
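The resolution-switch detection added above leans on the DRM core helper drm_mode_match(), comparing the mode cached on the current master against the incoming adjusted mode by timings only. A standalone sketch of that check follows; only drm_mode_match() and DRM_MODE_MATCH_TIMINGS are DRM core symbols, the wrapper and its names are illustrative.

#include <drm/drm_modes.h>

/*
 * Treat the update as a resolution/timing switch when the timing fields
 * differ; DRM_MODE_MATCH_TIMINGS compares the h/v display, sync and total
 * values (the pixel clock is covered by the separate DRM_MODE_MATCH_CLOCK flag).
 */
static bool timings_changed(const struct drm_display_mode *old_mode,
			    const struct drm_display_mode *new_mode)
{
	if (!old_mode || !new_mode)
		return false;

	return !drm_mode_match(old_mode, new_mode, DRM_MODE_MATCH_TIMINGS);
}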
@@ -2510,6 +2559,7 @@ static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
 	struct drm_connector *conn;
 	struct sde_connector_state *c_state;
 	struct msm_display_mode *msm_mode;
+	struct sde_crtc *sde_crtc;
 	int i = 0, ret;
 	int num_lm, num_intf, num_pp_per_intf;
 
@@ -2541,6 +2591,7 @@ static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
 	}
 
 	sde_enc->crtc = drm_enc->crtc;
+	sde_crtc = to_sde_crtc(drm_enc->crtc);
 	sde_crtc_set_qos_dirty(drm_enc->crtc);
 
 	/* get and store the mode_info */
@@ -2566,7 +2617,7 @@ static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
 
 	/* release resources before seamless mode change */
 	msm_mode = &c_state->msm_mode;
-	ret = sde_encoder_virt_modeset_rc(drm_enc, msm_mode, true);
+	ret = sde_encoder_virt_modeset_rc(drm_enc, adj_mode, msm_mode, true);
 	if (ret)
 		return;
 
@@ -2600,12 +2651,13 @@ static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
 			phys->hw_pp = sde_enc->hw_pp[i * num_pp_per_intf];
 			phys->connector = conn;
 			if (phys->ops.mode_set)
-				phys->ops.mode_set(phys, mode, adj_mode);
+				phys->ops.mode_set(phys, mode, adj_mode,
+					&sde_crtc->reinit_crtc_mixers);
 		}
 	}
 
 	/* update resources after seamless mode change */
-	sde_encoder_virt_modeset_rc(drm_enc, msm_mode, false);
+	sde_encoder_virt_modeset_rc(drm_enc, adj_mode, msm_mode, false);
 }
 
 void sde_encoder_control_te(struct drm_encoder *drm_enc, bool enable)
@@ -2802,6 +2854,7 @@ static void _sde_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
 
 	memset(&sde_enc->prv_conn_roi, 0, sizeof(sde_enc->prv_conn_roi));
 	memset(&sde_enc->cur_conn_roi, 0, sizeof(sde_enc->cur_conn_roi));
+	_sde_encoder_control_fal10_veto(drm_enc, true);
 }
 
 static void _sde_encoder_setup_dither(struct sde_encoder_phys *phys)
@@ -3065,6 +3118,8 @@ void sde_encoder_virt_reset(struct drm_encoder *drm_enc)
 	struct sde_kms *sde_kms = sde_encoder_get_kms(drm_enc);
 	int i = 0;
 
+	_sde_encoder_control_fal10_veto(drm_enc, false);
+
 	for (i = 0; i < sde_enc->num_phys_encs; i++) {
 		if (sde_enc->phys_encs[i]) {
 			sde_enc->phys_encs[i]->cont_splash_enabled = false;
@@ -3089,6 +3144,7 @@ void sde_encoder_virt_reset(struct drm_encoder *drm_enc)
 static void sde_encoder_virt_disable(struct drm_encoder *drm_enc)
 {
 	struct sde_encoder_virt *sde_enc = NULL;
+	struct sde_connector *sde_conn;
 	struct sde_kms *sde_kms;
 	enum sde_intf_mode intf_mode;
 	int ret, i = 0;
@@ -3110,6 +3166,11 @@ static void sde_encoder_virt_disable(struct drm_encoder *drm_enc)
 	}
 
 	sde_enc = to_sde_encoder_virt(drm_enc);
+	if (!sde_enc->cur_master) {
+		SDE_ERROR("Invalid cur_master\n");
+		return;
+	}
+	sde_conn = to_sde_connector(sde_enc->cur_master->connector);
 	SDE_DEBUG_ENC(sde_enc, "\n");
 
 	sde_kms = sde_encoder_get_kms(&sde_enc->base);
@@ -3126,6 +3187,7 @@ static void sde_encoder_virt_disable(struct drm_encoder *drm_enc)
 
 	_sde_encoder_input_handler_unregister(drm_enc);
 
+	flush_delayed_work(&sde_conn->status_work);
 	/*
	 * For primary command mode and video mode encoders, execute the
	 * resource control pre-stop operations before the physical encoders
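The new flush_delayed_work() call enforces the usual teardown ordering for the connector's ESD status work: any queued or running status check is allowed to finish before the disable sequence tears down the state it would otherwise touch. A generic sketch of that pattern follows, with illustrative names; only the workqueue API is taken as-is.

#include <linux/workqueue.h>

struct status_ctx {
	struct delayed_work status_work;	/* periodic ESD/status check */
};

static void disable_path_sketch(struct status_ctx *ctx)
{
	/*
	 * Wait for a pending status check to complete so it cannot race
	 * with the disable/pre-stop sequence that follows.
	 */
	flush_delayed_work(&ctx->status_work);

	/* ...resource-control pre-stop and physical encoder disable... */
}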