Merge "disp: msm: dsi: DSI PHY V4 support of dynamic clock switch"

This commit is contained in:
qctecmdr
2019-06-27 03:34:31 -07:00
committed by Gerrit - the friendly Code Review server
32 changed files with 2878 additions and 531 deletions

View File

@@ -62,6 +62,8 @@ static void dsi_catalog_cmn_init(struct dsi_ctrl_hw *ctrl,
dsi_ctrl_hw_cmn_wait_for_cmd_mode_mdp_idle; dsi_ctrl_hw_cmn_wait_for_cmd_mode_mdp_idle;
ctrl->ops.setup_avr = dsi_ctrl_hw_cmn_setup_avr; ctrl->ops.setup_avr = dsi_ctrl_hw_cmn_setup_avr;
ctrl->ops.set_continuous_clk = dsi_ctrl_hw_cmn_set_continuous_clk; ctrl->ops.set_continuous_clk = dsi_ctrl_hw_cmn_set_continuous_clk;
ctrl->ops.wait4dynamic_refresh_done =
dsi_ctrl_hw_cmn_wait4dynamic_refresh_done;
switch (version) { switch (version) {
case DSI_CTRL_VERSION_1_4: case DSI_CTRL_VERSION_1_4:
@@ -215,6 +217,14 @@ static void dsi_catalog_phy_3_0_init(struct dsi_phy_hw *phy)
phy->ops.clamp_ctrl = dsi_phy_hw_v3_0_clamp_ctrl; phy->ops.clamp_ctrl = dsi_phy_hw_v3_0_clamp_ctrl;
phy->ops.phy_lane_reset = dsi_phy_hw_v3_0_lane_reset; phy->ops.phy_lane_reset = dsi_phy_hw_v3_0_lane_reset;
phy->ops.toggle_resync_fifo = dsi_phy_hw_v3_0_toggle_resync_fifo; phy->ops.toggle_resync_fifo = dsi_phy_hw_v3_0_toggle_resync_fifo;
phy->ops.dyn_refresh_ops.dyn_refresh_config =
dsi_phy_hw_v3_0_dyn_refresh_config;
phy->ops.dyn_refresh_ops.dyn_refresh_pipe_delay =
dsi_phy_hw_v3_0_dyn_refresh_pipe_delay;
phy->ops.dyn_refresh_ops.dyn_refresh_helper =
dsi_phy_hw_v3_0_dyn_refresh_helper;
phy->ops.dyn_refresh_ops.cache_phy_timings =
dsi_phy_hw_v3_0_cache_phy_timings;
} }
/** /**
@@ -242,6 +252,15 @@ static void dsi_catalog_phy_4_0_init(struct dsi_phy_hw *phy)
phy->ops.phy_lane_reset = dsi_phy_hw_v4_0_lane_reset; phy->ops.phy_lane_reset = dsi_phy_hw_v4_0_lane_reset;
phy->ops.toggle_resync_fifo = dsi_phy_hw_v4_0_toggle_resync_fifo; phy->ops.toggle_resync_fifo = dsi_phy_hw_v4_0_toggle_resync_fifo;
phy->ops.reset_clk_en_sel = dsi_phy_hw_v4_0_reset_clk_en_sel; phy->ops.reset_clk_en_sel = dsi_phy_hw_v4_0_reset_clk_en_sel;
phy->ops.dyn_refresh_ops.dyn_refresh_config =
dsi_phy_hw_v4_0_dyn_refresh_config;
phy->ops.dyn_refresh_ops.dyn_refresh_pipe_delay =
dsi_phy_hw_v4_0_dyn_refresh_pipe_delay;
phy->ops.dyn_refresh_ops.dyn_refresh_helper =
dsi_phy_hw_v4_0_dyn_refresh_helper;
phy->ops.dyn_refresh_ops.cache_phy_timings =
dsi_phy_hw_v4_0_cache_phy_timings;
} }
/** /**

View File

@@ -57,6 +57,7 @@ int dsi_phy_timing_calc_init(struct dsi_phy_hw *phy,
* @mode: DSI mode information. * @mode: DSI mode information.
* @host: DSI host configuration. * @host: DSI host configuration.
* @timing: DSI phy lane configurations. * @timing: DSI phy lane configurations.
* @use_mode_bit_clk: Boolean to indicate whether to recalculate bit clk.
* *
* This function setups the catalog information in the dsi_phy_hw object. * This function setups the catalog information in the dsi_phy_hw object.
* *
@@ -65,7 +66,8 @@ int dsi_phy_timing_calc_init(struct dsi_phy_hw *phy,
int dsi_phy_hw_calculate_timing_params(struct dsi_phy_hw *phy, int dsi_phy_hw_calculate_timing_params(struct dsi_phy_hw *phy,
struct dsi_mode_info *mode, struct dsi_mode_info *mode,
struct dsi_host_common_cfg *host, struct dsi_host_common_cfg *host,
struct dsi_phy_per_lane_cfgs *timing); struct dsi_phy_per_lane_cfgs *timing,
bool use_mode_bit_clk);
/* Definitions for 14nm PHY hardware driver */ /* Definitions for 14nm PHY hardware driver */
void dsi_phy_hw_v2_0_regulator_enable(struct dsi_phy_hw *phy, void dsi_phy_hw_v2_0_regulator_enable(struct dsi_phy_hw *phy,
@@ -238,4 +240,23 @@ void dsi_ctrl_hw_22_config_clk_gating(struct dsi_ctrl_hw *ctrl, bool enable,
void dsi_ctrl_hw_cmn_set_continuous_clk(struct dsi_ctrl_hw *ctrl, bool enable); void dsi_ctrl_hw_cmn_set_continuous_clk(struct dsi_ctrl_hw *ctrl, bool enable);
/* dynamic refresh specific functions */
void dsi_phy_hw_v3_0_dyn_refresh_helper(struct dsi_phy_hw *phy, u32 offset);
void dsi_phy_hw_v3_0_dyn_refresh_config(struct dsi_phy_hw *phy,
struct dsi_phy_cfg *cfg, bool is_master);
void dsi_phy_hw_v3_0_dyn_refresh_pipe_delay(struct dsi_phy_hw *phy,
struct dsi_dyn_clk_delay *delay);
int dsi_ctrl_hw_cmn_wait4dynamic_refresh_done(struct dsi_ctrl_hw *ctrl);
int dsi_phy_hw_v3_0_cache_phy_timings(struct dsi_phy_per_lane_cfgs *timings,
u32 *dst, u32 size);
void dsi_phy_hw_v4_0_dyn_refresh_helper(struct dsi_phy_hw *phy, u32 offset);
void dsi_phy_hw_v4_0_dyn_refresh_config(struct dsi_phy_hw *phy,
struct dsi_phy_cfg *cfg, bool is_master);
void dsi_phy_hw_v4_0_dyn_refresh_pipe_delay(struct dsi_phy_hw *phy,
struct dsi_dyn_clk_delay *delay);
int dsi_phy_hw_v4_0_cache_phy_timings(struct dsi_phy_per_lane_cfgs *timings,
u32 *dst, u32 size);
#endif /* _DSI_CATALOG_H_ */ #endif /* _DSI_CATALOG_H_ */

View File

@@ -307,4 +307,18 @@ int dsi_clk_set_byte_clk_rate(void *client, u64 byte_clk, u32 index);
*/ */
int dsi_clk_update_parent(struct dsi_clk_link_set *parent, int dsi_clk_update_parent(struct dsi_clk_link_set *parent,
struct dsi_clk_link_set *child); struct dsi_clk_link_set *child);
/**
* dsi_clk_prepare_enable() - prepare and enable dsi src clocks
* @clk: list of src clocks.
*
* @return: Zero on success and err no on failure
*/
int dsi_clk_prepare_enable(struct dsi_clk_link_set *clk);
/**
* dsi_clk_disable_unprepare() - disable and unprepare dsi src clocks
* @clk: list of src clocks.
*/
void dsi_clk_disable_unprepare(struct dsi_clk_link_set *clk);
#endif /* _DSI_CLK_H_ */ #endif /* _DSI_CLK_H_ */

View File

@@ -107,6 +107,7 @@ int dsi_clk_set_link_frequencies(void *client, struct link_clk_freq freq,
* dsi_clk_set_pixel_clk_rate() - set frequency for pixel clock * dsi_clk_set_pixel_clk_rate() - set frequency for pixel clock
* @clks: DSI link clock information. * @clks: DSI link clock information.
* @pixel_clk: Pixel clock rate in KHz. * @pixel_clk: Pixel clock rate in KHz.
* @index: Index of the DSI controller.
* *
* return: error code in case of failure or 0 for success. * return: error code in case of failure or 0 for success.
*/ */
@@ -129,7 +130,7 @@ int dsi_clk_set_pixel_clk_rate(void *client, u64 pixel_clk, u32 index)
/** /**
* dsi_clk_set_byte_clk_rate() - set frequency for byte clock * dsi_clk_set_byte_clk_rate() - set frequency for byte clock
* @client: DSI clock client pointer. * @client: DSI clock client pointer.
* @byte_clk: Pixel clock rate in Hz. * @byte_clk: Byte clock rate in Hz.
* @index: Index of the DSI controller. * @index: Index of the DSI controller.
* return: error code in case of failure or 0 for success. * return: error code in case of failure or 0 for success.
*/ */
@@ -138,6 +139,7 @@ int dsi_clk_set_byte_clk_rate(void *client, u64 byte_clk, u32 index)
int rc = 0; int rc = 0;
struct dsi_clk_client_info *c = client; struct dsi_clk_client_info *c = client;
struct dsi_clk_mngr *mngr; struct dsi_clk_mngr *mngr;
u64 byte_intf_rate;
mngr = c->mngr; mngr = c->mngr;
rc = clk_set_rate(mngr->link_clks[index].hs_clks.byte_clk, byte_clk); rc = clk_set_rate(mngr->link_clks[index].hs_clks.byte_clk, byte_clk);
@@ -146,8 +148,16 @@ int dsi_clk_set_byte_clk_rate(void *client, u64 byte_clk, u32 index)
else else
mngr->link_clks[index].freq.byte_clk_rate = byte_clk; mngr->link_clks[index].freq.byte_clk_rate = byte_clk;
return rc; if (mngr->link_clks[index].hs_clks.byte_intf_clk) {
byte_intf_rate = mngr->link_clks[index].freq.byte_clk_rate / 2;
rc = clk_set_rate(mngr->link_clks[index].hs_clks.byte_intf_clk,
byte_intf_rate);
if (rc)
pr_err("failed to set clk rate for byte intf clk=%d\n",
rc);
}
return rc;
} }
/** /**
@@ -175,6 +185,41 @@ error:
return rc; return rc;
} }
/**
 * dsi_clk_prepare_enable() - prepare and enable dsi src clocks
 * @clk: list of src clocks.
 *
 * Enables the byte clock first, then the pixel clock. On a pixel clock
 * failure the byte clock is unwound so the clock refcounts stay balanced.
 *
 * @return: Zero on success and err no on failure.
 */
int dsi_clk_prepare_enable(struct dsi_clk_link_set *clk)
{
	int rc;

	rc = clk_prepare_enable(clk->byte_clk);
	if (rc) {
		pr_err("failed to enable byte src clk %d\n", rc);
		return rc;
	}

	rc = clk_prepare_enable(clk->pixel_clk);
	if (rc) {
		pr_err("failed to enable pixel src clk %d\n", rc);
		/* balance the byte clk enable done above */
		clk_disable_unprepare(clk->byte_clk);
		return rc;
	}

	return 0;
}
/**
 * dsi_clk_disable_unprepare() - disable and unprepare dsi src clocks
 * @clk: list of src clocks.
 *
 * Counterpart of dsi_clk_prepare_enable(); clocks are released in the
 * reverse order of enabling (pixel first, then byte).
 */
void dsi_clk_disable_unprepare(struct dsi_clk_link_set *clk)
{
	clk_disable_unprepare(clk->pixel_clk);
	clk_disable_unprepare(clk->byte_clk);
}
int dsi_core_clk_start(struct dsi_core_clks *c_clks) int dsi_core_clk_start(struct dsi_core_clks *c_clks)
{ {
int rc = 0; int rc = 0;

View File

@@ -2887,7 +2887,12 @@ int dsi_ctrl_update_host_config(struct dsi_ctrl *ctrl,
goto error; goto error;
} }
if (!(flags & (DSI_MODE_FLAG_SEAMLESS | DSI_MODE_FLAG_VRR))) { if (!(flags & (DSI_MODE_FLAG_SEAMLESS | DSI_MODE_FLAG_VRR |
DSI_MODE_FLAG_DYN_CLK))) {
/*
* for dynamic clk switch case link frequence would
* be updated dsi_display_dynamic_clk_switch().
*/
rc = dsi_ctrl_update_link_freqs(ctrl, config, clk_handle, rc = dsi_ctrl_update_link_freqs(ctrl, config, clk_handle,
mode); mode);
if (rc) { if (rc) {
@@ -3603,6 +3608,27 @@ void dsi_ctrl_irq_update(struct dsi_ctrl *dsi_ctrl, bool enable)
mutex_unlock(&dsi_ctrl->ctrl_lock); mutex_unlock(&dsi_ctrl->ctrl_lock);
} }
/**
 * dsi_ctrl_wait4dynamic_refresh_done() - Poll for dynamic refresh
 *				done interrupt.
 * @ctrl: DSI controller handle.
 *
 * Return: 0 on success (or when @ctrl is NULL or the hw op is not
 * implemented), otherwise the error from the hardware wait.
 */
int dsi_ctrl_wait4dynamic_refresh_done(struct dsi_ctrl *ctrl)
{
	int (*wait_fn)(struct dsi_ctrl_hw *hw);
	int rc = 0;

	if (!ctrl)
		return 0;

	mutex_lock(&ctrl->ctrl_lock);
	wait_fn = ctrl->hw.ops.wait4dynamic_refresh_done;
	if (wait_fn)
		rc = wait_fn(&ctrl->hw);
	mutex_unlock(&ctrl->ctrl_lock);

	return rc;
}
/** /**
* dsi_ctrl_drv_register() - register platform driver for dsi controller * dsi_ctrl_drv_register() - register platform driver for dsi controller
*/ */

View File

@@ -796,4 +796,11 @@ int dsi_ctrl_pixel_format_to_bpp(enum dsi_pixel_format dst_format);
* @enable: variable to control continuous clock. * @enable: variable to control continuous clock.
*/ */
void dsi_ctrl_set_continuous_clk(struct dsi_ctrl *dsi_ctrl, bool enable); void dsi_ctrl_set_continuous_clk(struct dsi_ctrl *dsi_ctrl, bool enable);
/**
* dsi_ctrl_wait4dynamic_refresh_done() - Poll for dynamic refresh done
* interrupt.
* @dsi_ctrl: DSI controller handle.
*/
int dsi_ctrl_wait4dynamic_refresh_done(struct dsi_ctrl *ctrl);
#endif /* _DSI_CTRL_H_ */ #endif /* _DSI_CTRL_H_ */

View File

@@ -817,6 +817,12 @@ struct dsi_ctrl_hw_ops {
* @enable: Bool to control continuous clock request. * @enable: Bool to control continuous clock request.
*/ */
void (*set_continuous_clk)(struct dsi_ctrl_hw *ctrl, bool enable); void (*set_continuous_clk)(struct dsi_ctrl_hw *ctrl, bool enable);
/**
* hw.ops.wait4dynamic_refresh_done() - Wait for dynamic refresh done
* @ctrl: Pointer to the controller host hardware.
*/
int (*wait4dynamic_refresh_done)(struct dsi_ctrl_hw *ctrl);
}; };
/* /*

View File

@@ -1472,6 +1472,13 @@ void dsi_ctrl_hw_cmn_mask_error_intr(struct dsi_ctrl_hw *ctrl, u32 idx, bool en)
} }
} }
if (idx & BIT(DSI_PLL_UNLOCK_ERR)) {
if (en)
reg |= BIT(28);
else
reg &= ~BIT(28);
}
DSI_W32(ctrl, 0x10c, reg); DSI_W32(ctrl, 0x10c, reg);
wmb(); /* ensure error is masked */ wmb(); /* ensure error is masked */
} }
@@ -1538,3 +1545,25 @@ void dsi_ctrl_hw_cmn_set_continuous_clk(struct dsi_ctrl_hw *ctrl, bool enable)
DSI_W32(ctrl, DSI_LANE_CTRL, reg); DSI_W32(ctrl, DSI_LANE_CTRL, reg);
wmb(); /* make sure request is set */ wmb(); /* make sure request is set */
} }
int dsi_ctrl_hw_cmn_wait4dynamic_refresh_done(struct dsi_ctrl_hw *ctrl)
{
int rc;
u32 const sleep_us = 1000;
u32 const timeout_us = 84000; /* approximately 5 vsyncs */
u32 reg = 0, dyn_refresh_done = BIT(28);
rc = readl_poll_timeout(ctrl->base + DSI_INT_CTRL, reg,
(reg & dyn_refresh_done), sleep_us, timeout_us);
if (rc) {
pr_err("wait4dynamic refresh timedout %d\n", rc);
return rc;
}
/* ack dynamic refresh done status */
reg = DSI_R32(ctrl, DSI_INT_CTRL);
reg |= dyn_refresh_done;
DSI_W32(ctrl, DSI_INT_CTRL, reg);
return 0;
}

View File

@@ -129,44 +129,7 @@
#define DSI_SCRATCH_REGISTER_1 (0x01F8) #define DSI_SCRATCH_REGISTER_1 (0x01F8)
#define DSI_SCRATCH_REGISTER_2 (0x01FC) #define DSI_SCRATCH_REGISTER_2 (0x01FC)
#define DSI_DYNAMIC_REFRESH_CTRL (0x0200) #define DSI_DYNAMIC_REFRESH_CTRL (0x0200)
#define DSI_DYNAMIC_REFRESH_PIPE_DELAY (0x0204)
#define DSI_DYNAMIC_REFRESH_PIPE_DELAY2 (0x0208)
#define DSI_DYNAMIC_REFRESH_PLL_DELAY (0x020C)
#define DSI_DYNAMIC_REFRESH_STATUS (0x0210) #define DSI_DYNAMIC_REFRESH_STATUS (0x0210)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL0 (0x0214)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL1 (0x0218)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL2 (0x021C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL3 (0x0220)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL4 (0x0224)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL5 (0x0228)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL6 (0x022C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL7 (0x0230)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL8 (0x0234)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL9 (0x0238)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL10 (0x023C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL11 (0x0240)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL12 (0x0244)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL13 (0x0248)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL14 (0x024C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL15 (0x0250)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL16 (0x0254)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL17 (0x0258)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL18 (0x025C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL19 (0x0260)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL20 (0x0264)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL21 (0x0268)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL22 (0x026C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL23 (0x0270)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL24 (0x0274)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL25 (0x0278)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL26 (0x027C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL27 (0x0280)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL28 (0x0284)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL29 (0x0288)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL30 (0x028C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL31 (0x0290)
#define DSI_DYNAMIC_REFRESH_PLL_UPPER_ADDR (0x0294)
#define DSI_DYNAMIC_REFRESH_PLL_UPPER_ADDR2 (0x0298)
#define DSI_VIDEO_COMPRESSION_MODE_CTRL (0x02A0) #define DSI_VIDEO_COMPRESSION_MODE_CTRL (0x02A0)
#define DSI_VIDEO_COMPRESSION_MODE_CTRL2 (0x02A4) #define DSI_VIDEO_COMPRESSION_MODE_CTRL2 (0x02A4)
#define DSI_COMMAND_COMPRESSION_MODE_CTRL (0x02A8) #define DSI_COMMAND_COMPRESSION_MODE_CTRL (0x02A8)

View File

@@ -87,6 +87,7 @@ enum dsi_op_mode {
* New timing values are sent from DAL. * New timing values are sent from DAL.
* @DSI_MODE_FLAG_POMS: * @DSI_MODE_FLAG_POMS:
* Seamless transition is dynamic panel operating mode switch * Seamless transition is dynamic panel operating mode switch
* @DSI_MODE_FLAG_DYN_CLK: Seamless transition is dynamic clock change
*/ */
enum dsi_mode_flags { enum dsi_mode_flags {
DSI_MODE_FLAG_SEAMLESS = BIT(0), DSI_MODE_FLAG_SEAMLESS = BIT(0),
@@ -95,6 +96,7 @@ enum dsi_mode_flags {
DSI_MODE_FLAG_DMS = BIT(3), DSI_MODE_FLAG_DMS = BIT(3),
DSI_MODE_FLAG_VRR = BIT(4), DSI_MODE_FLAG_VRR = BIT(4),
DSI_MODE_FLAG_POMS = BIT(5), DSI_MODE_FLAG_POMS = BIT(5),
DSI_MODE_FLAG_DYN_CLK = BIT(6),
}; };
/** /**
@@ -660,12 +662,50 @@ struct dsi_event_cb_info {
* @DSI_FIFO_OVERFLOW: DSI FIFO Overflow error * @DSI_FIFO_OVERFLOW: DSI FIFO Overflow error
* @DSI_FIFO_UNDERFLOW: DSI FIFO Underflow error * @DSI_FIFO_UNDERFLOW: DSI FIFO Underflow error
* @DSI_LP_Rx_TIMEOUT: DSI LP/RX Timeout error * @DSI_LP_Rx_TIMEOUT: DSI LP/RX Timeout error
* @DSI_PLL_UNLOCK_ERR: DSI PLL unlock error
*/ */
enum dsi_error_status { enum dsi_error_status {
DSI_FIFO_OVERFLOW = 1, DSI_FIFO_OVERFLOW = 1,
DSI_FIFO_UNDERFLOW, DSI_FIFO_UNDERFLOW,
DSI_LP_Rx_TIMEOUT, DSI_LP_Rx_TIMEOUT,
DSI_PLL_UNLOCK_ERR,
DSI_ERR_INTR_ALL, DSI_ERR_INTR_ALL,
}; };
/* structure containing the delays required for dynamic clk */
struct dsi_dyn_clk_delay {
u32 pipe_delay;
u32 pipe_delay2;
u32 pll_delay;
};
/* dynamic refresh control bits */
enum dsi_dyn_clk_control_bits {
DYN_REFRESH_INTF_SEL = 1,
DYN_REFRESH_SYNC_MODE,
DYN_REFRESH_SW_TRIGGER,
DYN_REFRESH_SWI_CTRL,
};
/* convert dsi pixel format into bits per pixel */
static inline int dsi_pixel_format_to_bpp(enum dsi_pixel_format fmt)
{
	switch (fmt) {
	case DSI_PIXEL_FORMAT_RGB111:
		return 3;
	case DSI_PIXEL_FORMAT_RGB332:
		return 8;
	case DSI_PIXEL_FORMAT_RGB444:
		return 12;
	case DSI_PIXEL_FORMAT_RGB565:
		return 16;
	case DSI_PIXEL_FORMAT_RGB666:
	case DSI_PIXEL_FORMAT_RGB666_LOOSE:
		return 18;
	case DSI_PIXEL_FORMAT_RGB888:
	case DSI_PIXEL_FORMAT_MAX:
	default:
		/* 24bpp is the fallback for unrecognized formats too */
		return 24;
	}
}
#endif /* _DSI_DEFS_H_ */ #endif /* _DSI_DEFS_H_ */

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
/* SPDX-License-Identifier: GPL-2.0-only */ /* SPDX-License-Identifier: GPL-2.0-only */
/* /*
* Copyright (c) 2015-2019, The Linux Foundation.All rights reserved. * Copyright (c) 2015-2019, The Linux Foundation. All rights reserved.
*/ */
#ifndef _DSI_DISPLAY_H_ #ifndef _DSI_DISPLAY_H_
@@ -410,13 +410,14 @@ int dsi_display_validate_mode(struct dsi_display *display,
u32 flags); u32 flags);
/** /**
* dsi_display_validate_mode_vrr() - validates mode if variable refresh case * dsi_display_validate_mode_change() - validates mode if variable refresh case
* or dynamic clk change case
* @display: Handle to display. * @display: Handle to display.
* @mode: Mode to be validated.. * @mode: Mode to be validated..
* *
* Return: 0 if error code. * Return: 0 if error code.
*/ */
int dsi_display_validate_mode_vrr(struct dsi_display *display, int dsi_display_validate_mode_change(struct dsi_display *display,
struct dsi_display_mode *cur_dsi_mode, struct dsi_display_mode *cur_dsi_mode,
struct dsi_display_mode *mode); struct dsi_display_mode *mode);

View File

@@ -73,6 +73,8 @@ static void convert_to_dsi_mode(const struct drm_display_mode *drm_mode,
dsi_mode->dsi_mode_flags |= DSI_MODE_FLAG_VRR; dsi_mode->dsi_mode_flags |= DSI_MODE_FLAG_VRR;
if (msm_is_mode_seamless_poms(drm_mode)) if (msm_is_mode_seamless_poms(drm_mode))
dsi_mode->dsi_mode_flags |= DSI_MODE_FLAG_POMS; dsi_mode->dsi_mode_flags |= DSI_MODE_FLAG_POMS;
if (msm_is_mode_seamless_dyn_clk(drm_mode))
dsi_mode->dsi_mode_flags |= DSI_MODE_FLAG_DYN_CLK;
dsi_mode->timing.h_sync_polarity = dsi_mode->timing.h_sync_polarity =
!!(drm_mode->flags & DRM_MODE_FLAG_PHSYNC); !!(drm_mode->flags & DRM_MODE_FLAG_PHSYNC);
@@ -88,6 +90,8 @@ static void convert_to_dsi_mode(const struct drm_display_mode *drm_mode,
void dsi_convert_to_drm_mode(const struct dsi_display_mode *dsi_mode, void dsi_convert_to_drm_mode(const struct dsi_display_mode *dsi_mode,
struct drm_display_mode *drm_mode) struct drm_display_mode *drm_mode)
{ {
bool video_mode = (dsi_mode->panel_mode == DSI_OP_VIDEO_MODE);
memset(drm_mode, 0, sizeof(*drm_mode)); memset(drm_mode, 0, sizeof(*drm_mode));
drm_mode->hdisplay = dsi_mode->timing.h_active; drm_mode->hdisplay = dsi_mode->timing.h_active;
@@ -122,6 +126,8 @@ void dsi_convert_to_drm_mode(const struct dsi_display_mode *dsi_mode,
drm_mode->private_flags |= MSM_MODE_FLAG_SEAMLESS_VRR; drm_mode->private_flags |= MSM_MODE_FLAG_SEAMLESS_VRR;
if (dsi_mode->dsi_mode_flags & DSI_MODE_FLAG_POMS) if (dsi_mode->dsi_mode_flags & DSI_MODE_FLAG_POMS)
drm_mode->private_flags |= MSM_MODE_FLAG_SEAMLESS_POMS; drm_mode->private_flags |= MSM_MODE_FLAG_SEAMLESS_POMS;
if (dsi_mode->dsi_mode_flags & DSI_MODE_FLAG_DYN_CLK)
drm_mode->private_flags |= MSM_MODE_FLAG_SEAMLESS_DYN_CLK;
if (dsi_mode->timing.h_sync_polarity) if (dsi_mode->timing.h_sync_polarity)
drm_mode->flags |= DRM_MODE_FLAG_PHSYNC; drm_mode->flags |= DRM_MODE_FLAG_PHSYNC;
@@ -133,7 +139,11 @@ void dsi_convert_to_drm_mode(const struct dsi_display_mode *dsi_mode,
if (dsi_mode->panel_mode == DSI_OP_CMD_MODE) if (dsi_mode->panel_mode == DSI_OP_CMD_MODE)
drm_mode->flags |= DRM_MODE_FLAG_CMD_MODE_PANEL; drm_mode->flags |= DRM_MODE_FLAG_CMD_MODE_PANEL;
drm_mode_set_name(drm_mode); /* set mode name */
snprintf(drm_mode->name, DRM_DISPLAY_MODE_LEN, "%dx%dx%dx%d%s",
drm_mode->hdisplay, drm_mode->vdisplay,
drm_mode->vrefresh, drm_mode->clock,
video_mode ? "vid" : "cmd");
} }
static int dsi_bridge_attach(struct drm_bridge *bridge) static int dsi_bridge_attach(struct drm_bridge *bridge)
@@ -178,7 +188,8 @@ static void dsi_bridge_pre_enable(struct drm_bridge *bridge)
} }
if (c_bridge->dsi_mode.dsi_mode_flags & if (c_bridge->dsi_mode.dsi_mode_flags &
(DSI_MODE_FLAG_SEAMLESS | DSI_MODE_FLAG_VRR)) { (DSI_MODE_FLAG_SEAMLESS | DSI_MODE_FLAG_VRR |
DSI_MODE_FLAG_DYN_CLK)) {
pr_debug("[%d] seamless pre-enable\n", c_bridge->id); pr_debug("[%d] seamless pre-enable\n", c_bridge->id);
return; return;
} }
@@ -220,7 +231,8 @@ static void dsi_bridge_enable(struct drm_bridge *bridge)
} }
if (c_bridge->dsi_mode.dsi_mode_flags & if (c_bridge->dsi_mode.dsi_mode_flags &
(DSI_MODE_FLAG_SEAMLESS | DSI_MODE_FLAG_VRR)) { (DSI_MODE_FLAG_SEAMLESS | DSI_MODE_FLAG_VRR |
DSI_MODE_FLAG_DYN_CLK)) {
pr_debug("[%d] seamless enable\n", c_bridge->id); pr_debug("[%d] seamless enable\n", c_bridge->id);
return; return;
} }
@@ -311,6 +323,12 @@ static void dsi_bridge_mode_set(struct drm_bridge *bridge,
memset(&(c_bridge->dsi_mode), 0x0, sizeof(struct dsi_display_mode)); memset(&(c_bridge->dsi_mode), 0x0, sizeof(struct dsi_display_mode));
convert_to_dsi_mode(adjusted_mode, &(c_bridge->dsi_mode)); convert_to_dsi_mode(adjusted_mode, &(c_bridge->dsi_mode));
/* restore bit_clk_rate also for dynamic clk use cases */
c_bridge->dsi_mode.timing.clk_rate_hz =
dsi_drm_find_bit_clk_rate(c_bridge->display, adjusted_mode);
pr_debug("clk_rate: %llu\n", c_bridge->dsi_mode.timing.clk_rate_hz);
} }
static bool dsi_bridge_mode_fixup(struct drm_bridge *bridge, static bool dsi_bridge_mode_fixup(struct drm_bridge *bridge,
@@ -379,11 +397,13 @@ static bool dsi_bridge_mode_fixup(struct drm_bridge *bridge,
cur_dsi_mode.timing.dsc_enabled = cur_dsi_mode.timing.dsc_enabled =
dsi_mode.priv_info->dsc_enabled; dsi_mode.priv_info->dsc_enabled;
cur_dsi_mode.timing.dsc = &dsi_mode.priv_info->dsc; cur_dsi_mode.timing.dsc = &dsi_mode.priv_info->dsc;
rc = dsi_display_validate_mode_vrr(c_bridge->display, rc = dsi_display_validate_mode_change(c_bridge->display,
&cur_dsi_mode, &dsi_mode); &cur_dsi_mode, &dsi_mode);
if (rc) if (rc) {
pr_debug("[%s] vrr mode mismatch failure rc=%d\n", pr_err("[%s] seamless mode mismatch failure rc=%d\n",
c_bridge->display->name, rc); c_bridge->display->name, rc);
return false;
}
cur_mode = crtc_state->crtc->mode; cur_mode = crtc_state->crtc->mode;
@@ -397,6 +417,7 @@ static bool dsi_bridge_mode_fixup(struct drm_bridge *bridge,
if (!drm_mode_equal(&cur_mode, adjusted_mode) && if (!drm_mode_equal(&cur_mode, adjusted_mode) &&
(!(dsi_mode.dsi_mode_flags & DSI_MODE_FLAG_VRR)) && (!(dsi_mode.dsi_mode_flags & DSI_MODE_FLAG_VRR)) &&
(!(dsi_mode.dsi_mode_flags & DSI_MODE_FLAG_POMS)) && (!(dsi_mode.dsi_mode_flags & DSI_MODE_FLAG_POMS)) &&
(!(dsi_mode.dsi_mode_flags & DSI_MODE_FLAG_DYN_CLK)) &&
(!crtc_state->active_changed || (!crtc_state->active_changed ||
display->is_cont_splash_enabled)) display->is_cont_splash_enabled))
dsi_mode.dsi_mode_flags |= DSI_MODE_FLAG_DMS; dsi_mode.dsi_mode_flags |= DSI_MODE_FLAG_DMS;
@@ -408,6 +429,33 @@ static bool dsi_bridge_mode_fixup(struct drm_bridge *bridge,
return true; return true;
} }
/*
 * Look up the DSI mode whose active area, pixel clock and refresh rate
 * match @drm_mode and return its bit clk rate; 0 when no mode matches
 * or the arguments are invalid.
 */
u64 dsi_drm_find_bit_clk_rate(void *display,
		const struct drm_display_mode *drm_mode)
{
	struct dsi_display *dsi_display = display;
	struct dsi_display_mode *m;
	int count = 0;
	int i;

	if (!dsi_display || !drm_mode)
		return 0;

	dsi_display_get_mode_count(dsi_display, &count);

	for (i = 0; i < count; i++) {
		m = &dsi_display->modes[i];
		if ((m->timing.v_active == drm_mode->vdisplay) &&
		    (m->timing.h_active == drm_mode->hdisplay) &&
		    (m->pixel_clk_khz == drm_mode->clock) &&
		    (m->timing.refresh_rate == drm_mode->vrefresh))
			return m->timing.clk_rate_hz;
	}

	return 0;
}
int dsi_conn_get_mode_info(struct drm_connector *connector, int dsi_conn_get_mode_info(struct drm_connector *connector,
const struct drm_display_mode *drm_mode, const struct drm_display_mode *drm_mode,
struct msm_mode_info *mode_info, struct msm_mode_info *mode_info,
@@ -432,7 +480,7 @@ int dsi_conn_get_mode_info(struct drm_connector *connector,
mode_info->prefill_lines = dsi_mode.priv_info->panel_prefill_lines; mode_info->prefill_lines = dsi_mode.priv_info->panel_prefill_lines;
mode_info->jitter_numer = dsi_mode.priv_info->panel_jitter_numer; mode_info->jitter_numer = dsi_mode.priv_info->panel_jitter_numer;
mode_info->jitter_denom = dsi_mode.priv_info->panel_jitter_denom; mode_info->jitter_denom = dsi_mode.priv_info->panel_jitter_denom;
mode_info->clk_rate = dsi_mode.priv_info->clk_rate_hz; mode_info->clk_rate = dsi_drm_find_bit_clk_rate(display, drm_mode);
mode_info->mdp_transfer_time_us = mode_info->mdp_transfer_time_us =
dsi_mode.priv_info->mdp_transfer_time_us; dsi_mode.priv_info->mdp_transfer_time_us;
@@ -538,6 +586,9 @@ int dsi_conn_set_info_blob(struct drm_connector *connector,
panel->dfps_caps.max_refresh_rate); panel->dfps_caps.max_refresh_rate);
} }
sde_kms_info_add_keystr(info, "dyn bitclk support",
panel->dyn_clk_caps.dyn_clk_support ? "true" : "false");
switch (panel->phy_props.rotation) { switch (panel->phy_props.rotation) {
case DSI_PANEL_ROTATE_NONE: case DSI_PANEL_ROTATE_NONE:
sde_kms_info_add_keystr(info, "panel orientation", "none"); sde_kms_info_add_keystr(info, "panel orientation", "none");
@@ -796,6 +847,9 @@ int dsi_connector_get_modes(struct drm_connector *connector, void *data)
} }
m->width_mm = connector->display_info.width_mm; m->width_mm = connector->display_info.width_mm;
m->height_mm = connector->display_info.height_mm; m->height_mm = connector->display_info.height_mm;
/* set the first mode in list as preferred */
if (i == 0)
m->type |= DRM_MODE_TYPE_PREFERRED;
drm_mode_probed_add(connector, m); drm_mode_probed_add(connector, m);
} }
@@ -915,6 +969,9 @@ int dsi_conn_post_kickoff(struct drm_connector *connector)
c_bridge->dsi_mode.dsi_mode_flags &= ~DSI_MODE_FLAG_VRR; c_bridge->dsi_mode.dsi_mode_flags &= ~DSI_MODE_FLAG_VRR;
} }
/* ensure dynamic clk switch flag is reset */
c_bridge->dsi_mode.dsi_mode_flags &= ~DSI_MODE_FLAG_DYN_CLK;
return 0; return 0;
} }

View File

@@ -130,4 +130,6 @@ int dsi_conn_post_kickoff(struct drm_connector *connector);
void dsi_convert_to_drm_mode(const struct dsi_display_mode *dsi_mode, void dsi_convert_to_drm_mode(const struct dsi_display_mode *dsi_mode,
struct drm_display_mode *drm_mode); struct drm_display_mode *drm_mode);
u64 dsi_drm_find_bit_clk_rate(void *display,
const struct drm_display_mode *drm_mode);
#endif /* _DSI_DRM_H_ */ #endif /* _DSI_DRM_H_ */

View File

@@ -44,4 +44,14 @@
#define DSI_R64(dsi_hw, off) readq_relaxed((dsi_hw)->base + (off)) #define DSI_R64(dsi_hw, off) readq_relaxed((dsi_hw)->base + (off))
#define DSI_W64(dsi_hw, off, val) writeq_relaxed((val), (dsi_hw)->base + (off)) #define DSI_W64(dsi_hw, off, val) writeq_relaxed((val), (dsi_hw)->base + (off))
#define PLL_CALC_DATA(addr0, addr1, data0, data1) \
(((data1) << 24) | ((((addr1)/4) & 0xFF) << 16) | \
((data0) << 8) | (((addr0)/4) & 0xFF))
#define DSI_DYN_REF_REG_W(base, offset, addr0, addr1, data0, data1) \
writel_relaxed(PLL_CALC_DATA(addr0, addr1, data0, data1), \
(base) + (offset))
#define DSI_GEN_R32(base, offset) readl_relaxed(base + (offset))
#define DSI_GEN_W32(base, offset, val) writel_relaxed((val), base + (offset))
#endif /* _DSI_HW_H_ */ #endif /* _DSI_HW_H_ */

View File

@@ -1155,6 +1155,48 @@ static int dsi_panel_parse_qsync_caps(struct dsi_panel *panel,
return rc; return rc;
} }
static int dsi_panel_parse_dyn_clk_caps(struct dsi_panel *panel)
{
int rc = 0;
bool supported = false;
struct dsi_dyn_clk_caps *dyn_clk_caps = &panel->dyn_clk_caps;
struct dsi_parser_utils *utils = &panel->utils;
const char *name = panel->name;
supported = utils->read_bool(utils->data, "qcom,dsi-dyn-clk-enable");
if (!supported) {
dyn_clk_caps->dyn_clk_support = false;
return rc;
}
dyn_clk_caps->bit_clk_list_len = utils->count_u32_elems(utils->data,
"qcom,dsi-dyn-clk-list");
if (dyn_clk_caps->bit_clk_list_len < 1) {
pr_err("[%s] failed to get supported bit clk list\n", name);
return -EINVAL;
}
dyn_clk_caps->bit_clk_list = kcalloc(dyn_clk_caps->bit_clk_list_len,
sizeof(u32), GFP_KERNEL);
if (!dyn_clk_caps->bit_clk_list)
return -ENOMEM;
rc = utils->read_u32_array(utils->data, "qcom,dsi-dyn-clk-list",
dyn_clk_caps->bit_clk_list,
dyn_clk_caps->bit_clk_list_len);
if (rc) {
pr_err("[%s] failed to parse supported bit clk list\n", name);
return -EINVAL;
}
dyn_clk_caps->dyn_clk_support = true;
return 0;
}
static int dsi_panel_parse_dfps_caps(struct dsi_panel *panel) static int dsi_panel_parse_dfps_caps(struct dsi_panel *panel)
{ {
int rc = 0; int rc = 0;
@@ -1163,7 +1205,7 @@ static int dsi_panel_parse_dfps_caps(struct dsi_panel *panel)
struct dsi_parser_utils *utils = &panel->utils; struct dsi_parser_utils *utils = &panel->utils;
const char *name = panel->name; const char *name = panel->name;
const char *type; const char *type;
u32 val = 0; u32 i;
supported = utils->read_bool(utils->data, supported = utils->read_bool(utils->data,
"qcom,mdss-dsi-pan-enable-dynamic-fps"); "qcom,mdss-dsi-pan-enable-dynamic-fps");
@@ -1171,11 +1213,11 @@ static int dsi_panel_parse_dfps_caps(struct dsi_panel *panel)
if (!supported) { if (!supported) {
pr_debug("[%s] DFPS is not supported\n", name); pr_debug("[%s] DFPS is not supported\n", name);
dfps_caps->dfps_support = false; dfps_caps->dfps_support = false;
} else { return rc;
}
type = utils->get_property(utils->data, type = utils->get_property(utils->data,
"qcom,mdss-dsi-pan-fps-update", "qcom,mdss-dsi-pan-fps-update", NULL);
NULL);
if (!type) { if (!type) {
pr_err("[%s] dfps type not defined\n", name); pr_err("[%s] dfps type not defined\n", name);
rc = -EINVAL; rc = -EINVAL;
@@ -1194,43 +1236,41 @@ static int dsi_panel_parse_dfps_caps(struct dsi_panel *panel)
goto error; goto error;
} }
rc = utils->read_u32(utils->data, dfps_caps->dfps_list_len = utils->count_u32_elems(utils->data,
"qcom,mdss-dsi-min-refresh-rate", "qcom,dsi-supported-dfps-list");
&val); if (dfps_caps->dfps_list_len < 1) {
if (rc) { pr_err("[%s] dfps refresh list not present\n", name);
pr_err("[%s] Min refresh rate is not defined\n", name);
rc = -EINVAL; rc = -EINVAL;
goto error; goto error;
} }
dfps_caps->min_refresh_rate = val;
rc = utils->read_u32(utils->data, dfps_caps->dfps_list = kcalloc(dfps_caps->dfps_list_len, sizeof(u32),
"qcom,mdss-dsi-max-refresh-rate", GFP_KERNEL);
&val); if (!dfps_caps->dfps_list) {
rc = -ENOMEM;
goto error;
}
rc = utils->read_u32_array(utils->data,
"qcom,dsi-supported-dfps-list",
dfps_caps->dfps_list,
dfps_caps->dfps_list_len);
if (rc) { if (rc) {
pr_debug("[%s] Using default refresh rate\n", name); pr_err("[%s] dfps refresh rate list parse failed\n", name);
rc = utils->read_u32(utils->data,
"qcom,mdss-dsi-panel-framerate",
&val);
if (rc) {
pr_err("[%s] max refresh rate is not defined\n",
name);
rc = -EINVAL; rc = -EINVAL;
goto error; goto error;
} }
}
dfps_caps->max_refresh_rate = val;
if (dfps_caps->min_refresh_rate > dfps_caps->max_refresh_rate) {
pr_err("[%s] min rate > max rate\n", name);
rc = -EINVAL;
}
pr_debug("[%s] DFPS is supported %d-%d, mode %d\n", name,
dfps_caps->min_refresh_rate,
dfps_caps->max_refresh_rate,
dfps_caps->type);
dfps_caps->dfps_support = true; dfps_caps->dfps_support = true;
/* calculate max and min fps */
dfps_caps->max_refresh_rate = dfps_caps->dfps_list[0];
dfps_caps->min_refresh_rate = dfps_caps->dfps_list[0];
for (i = 1; i < dfps_caps->dfps_list_len; i++) {
if (dfps_caps->dfps_list[i] < dfps_caps->min_refresh_rate)
dfps_caps->min_refresh_rate = dfps_caps->dfps_list[i];
else if (dfps_caps->dfps_list[i] > dfps_caps->max_refresh_rate)
dfps_caps->max_refresh_rate = dfps_caps->dfps_list[i];
} }
error: error:
@@ -3053,6 +3093,10 @@ struct dsi_panel *dsi_panel_get(struct device *parent,
pr_err("failed to parse qsync features, rc=%d\n", rc); pr_err("failed to parse qsync features, rc=%d\n", rc);
} }
rc = dsi_panel_parse_dyn_clk_caps(panel);
if (rc)
pr_err("failed to parse dynamic clk config, rc=%d\n", rc);
rc = dsi_panel_parse_phy_props(panel); rc = dsi_panel_parse_phy_props(panel);
if (rc) { if (rc) {
pr_err("failed to parse panel physical dimension, rc=%d\n", rc); pr_err("failed to parse panel physical dimension, rc=%d\n", rc);
@@ -3475,6 +3519,7 @@ int dsi_panel_get_host_cfg_for_mode(struct dsi_panel *panel,
struct dsi_host_config *config) struct dsi_host_config *config)
{ {
int rc = 0; int rc = 0;
struct dsi_dyn_clk_caps *dyn_clk_caps = &panel->dyn_clk_caps;
if (!panel || !mode || !config) { if (!panel || !mode || !config) {
pr_err("invalid params\n"); pr_err("invalid params\n");
@@ -3502,7 +3547,11 @@ int dsi_panel_get_host_cfg_for_mode(struct dsi_panel *panel,
config->video_timing.dsc_enabled = mode->priv_info->dsc_enabled; config->video_timing.dsc_enabled = mode->priv_info->dsc_enabled;
config->video_timing.dsc = &mode->priv_info->dsc; config->video_timing.dsc = &mode->priv_info->dsc;
if (dyn_clk_caps->dyn_clk_support)
config->bit_clk_rate_hz_override = mode->timing.clk_rate_hz;
else
config->bit_clk_rate_hz_override = mode->priv_info->clk_rate_hz; config->bit_clk_rate_hz_override = mode->priv_info->clk_rate_hz;
config->esc_clk_rate_hz = 19200000; config->esc_clk_rate_hz = 19200000;
mutex_unlock(&panel->panel_lock); mutex_unlock(&panel->panel_lock);
return rc; return rc;

View File

@@ -63,10 +63,18 @@ enum dsi_dms_mode {
}; };
struct dsi_dfps_capabilities { struct dsi_dfps_capabilities {
bool dfps_support;
enum dsi_dfps_type type; enum dsi_dfps_type type;
u32 min_refresh_rate; u32 min_refresh_rate;
u32 max_refresh_rate; u32 max_refresh_rate;
u32 *dfps_list;
u32 dfps_list_len;
bool dfps_support;
};
/**
 * struct dsi_dyn_clk_caps - dynamic DSI bit clock switch capabilities
 * @dyn_clk_support:  Set when the panel supports seamless dynamic bit
 *                    clock switching.
 * @bit_clk_list:     List of supported bit clock rates parsed from the
 *                    panel device tree.
 * @bit_clk_list_len: Number of entries in @bit_clk_list.
 */
struct dsi_dyn_clk_caps {
	bool dyn_clk_support;
	u32 *bit_clk_list;
	u32 bit_clk_list_len;
};
struct dsi_pinctrl_info { struct dsi_pinctrl_info {
@@ -159,6 +167,7 @@ struct dsi_panel {
bool panel_mode_switch_enabled; bool panel_mode_switch_enabled;
struct dsi_dfps_capabilities dfps_caps; struct dsi_dfps_capabilities dfps_caps;
struct dsi_dyn_clk_caps dyn_clk_caps;
struct dsi_panel_phy_props phy_props; struct dsi_panel_phy_props phy_props;
struct dsi_display_mode *cur_mode; struct dsi_display_mode *cur_mode;

View File

@@ -118,6 +118,9 @@ static int dsi_phy_regmap_init(struct platform_device *pdev,
phy->hw.base = ptr; phy->hw.base = ptr;
ptr = msm_ioremap(pdev, "dyn_refresh_base", phy->name);
phy->hw.dyn_pll_base = ptr;
pr_debug("[%s] map dsi_phy registers to %pK\n", pr_debug("[%s] map dsi_phy registers to %pK\n",
phy->name, phy->hw.base); phy->name, phy->hw.base);
@@ -641,11 +644,8 @@ int dsi_phy_validate_mode(struct msm_dsi_phy *dsi_phy,
return -EINVAL; return -EINVAL;
} }
mutex_lock(&dsi_phy->phy_lock);
pr_debug("[PHY_%d] Skipping validation\n", dsi_phy->index); pr_debug("[PHY_%d] Skipping validation\n", dsi_phy->index);
mutex_unlock(&dsi_phy->phy_lock);
return rc; return rc;
} }
@@ -887,7 +887,7 @@ int dsi_phy_enable(struct msm_dsi_phy *phy,
rc = phy->hw.ops.calculate_timing_params(&phy->hw, rc = phy->hw.ops.calculate_timing_params(&phy->hw,
&phy->mode, &phy->mode,
&config->common_config, &config->common_config,
&phy->cfg.timing); &phy->cfg.timing, false);
if (rc) { if (rc) {
pr_err("[%s] failed to set timing, rc=%d\n", phy->name, rc); pr_err("[%s] failed to set timing, rc=%d\n", phy->name, rc);
goto error; goto error;
@@ -905,6 +905,27 @@ error:
return rc; return rc;
} }
/* update dsi phy timings for dynamic clk switch use case */
int dsi_phy_update_phy_timings(struct msm_dsi_phy *phy,
		struct dsi_host_config *config)
{
	int rc;

	if (!phy || !config) {
		pr_err("invalid argument\n");
		return -EINVAL;
	}

	/*
	 * Refresh the cached mode from the host config, then recompute the
	 * lane timings while reusing the existing bit clock
	 * (use_mode_bit_clk = true).
	 */
	memcpy(&phy->mode, &config->video_timing, sizeof(phy->mode));

	rc = phy->hw.ops.calculate_timing_params(&phy->hw, &phy->mode,
			&config->common_config, &phy->cfg.timing, true);
	if (rc)
		pr_err("failed to calculate phy timings %d\n", rc);

	return rc;
}
int dsi_phy_lane_reset(struct msm_dsi_phy *phy) int dsi_phy_lane_reset(struct msm_dsi_phy *phy)
{ {
int ret = 0; int ret = 0;
@@ -1069,6 +1090,7 @@ int dsi_phy_set_timing_params(struct msm_dsi_phy *phy,
rc = phy->hw.ops.phy_timing_val(&phy->cfg.timing, timing, size); rc = phy->hw.ops.phy_timing_val(&phy->cfg.timing, timing, size);
if (!rc) if (!rc)
phy->cfg.is_phy_timing_present = true; phy->cfg.is_phy_timing_present = true;
mutex_unlock(&phy->phy_lock); mutex_unlock(&phy->phy_lock);
return rc; return rc;
} }
@@ -1117,6 +1139,106 @@ int dsi_phy_conv_logical_to_phy_lane(
return i; return i;
} }
/**
 * dsi_phy_config_dynamic_refresh() - Configure dynamic refresh registers
 * @phy:       DSI PHY handle
 * @delay:     pipe delays for dynamic refresh
 * @is_master: Boolean to indicate if for master or slave.
 */
void dsi_phy_config_dynamic_refresh(struct msm_dsi_phy *phy,
		struct dsi_dyn_clk_delay *delay,
		bool is_master)
{
	struct phy_dyn_refresh_ops *ops;

	if (!phy)
		return;

	ops = &phy->hw.ops.dyn_refresh_ops;

	mutex_lock(&phy->phy_lock);

	/* queue up the PHY register writes to be replayed on the switch */
	if (ops->dyn_refresh_config)
		ops->dyn_refresh_config(&phy->hw, &phy->cfg, is_master);

	/* then program the pipe/PLL delays that time the switch */
	if (ops->dyn_refresh_pipe_delay)
		ops->dyn_refresh_pipe_delay(&phy->hw, delay);

	mutex_unlock(&phy->phy_lock);
}
/**
 * dsi_phy_dynamic_refresh_trigger() - trigger dynamic refresh
 * @phy:       DSI PHY handle
 * @is_master: Boolean to indicate if for master or slave.
 */
void dsi_phy_dynamic_refresh_trigger(struct msm_dsi_phy *phy, bool is_master)
{
	u32 flags;

	if (!phy)
		return;

	/*
	 * program PLL_SWI_INTF_SEL and SW_TRIGGER bit only for
	 * master and program SYNC_MODE bit only for slave.
	 */
	if (is_master)
		flags = BIT(DYN_REFRESH_INTF_SEL) | BIT(DYN_REFRESH_SWI_CTRL) |
			BIT(DYN_REFRESH_SW_TRIGGER);
	else
		flags = BIT(DYN_REFRESH_SYNC_MODE) | BIT(DYN_REFRESH_SWI_CTRL);

	mutex_lock(&phy->phy_lock);
	if (phy->hw.ops.dyn_refresh_ops.dyn_refresh_helper)
		phy->hw.ops.dyn_refresh_ops.dyn_refresh_helper(&phy->hw,
				flags);
	mutex_unlock(&phy->phy_lock);
}
/**
 * dsi_phy_dyn_refresh_cache_phy_timings - cache the phy timings calculated
 * as part of dynamic refresh.
 * @phy:  DSI PHY Handle.
 * @dst:  Pointer to cache location.
 * @size: Number of phy lane settings.
 *
 * Return: 0 on success (or when the hardware revision provides no cache
 * hook), negative error code otherwise.
 */
int dsi_phy_dyn_refresh_cache_phy_timings(struct msm_dsi_phy *phy, u32 *dst,
		u32 size)
{
	int (*cache_fn)(struct dsi_phy_per_lane_cfgs *timings, u32 *dst,
			u32 size);
	int rc = 0;

	if (!phy || !dst || !size)
		return -EINVAL;

	cache_fn = phy->hw.ops.dyn_refresh_ops.cache_phy_timings;
	if (cache_fn) {
		rc = cache_fn(&phy->cfg.timing, dst, size);
		if (rc)
			pr_err("failed to cache phy timings %d\n", rc);
	}

	return rc;
}
/**
* dsi_phy_dynamic_refresh_clear() - clear dynamic refresh config
* @phy: DSI PHY handle
*/
void dsi_phy_dynamic_refresh_clear(struct msm_dsi_phy *phy)
{
if (!phy)
return;
mutex_lock(&phy->phy_lock);
if (phy->hw.ops.dyn_refresh_ops.dyn_refresh_helper)
phy->hw.ops.dyn_refresh_ops.dyn_refresh_helper(&phy->hw, 0);
mutex_unlock(&phy->phy_lock);
}
void dsi_phy_drv_register(void) void dsi_phy_drv_register(void)
{ {
platform_driver_register(&dsi_phy_platform_driver); platform_driver_register(&dsi_phy_platform_driver);

View File

@@ -290,4 +290,45 @@ void dsi_phy_drv_register(void);
*/ */
void dsi_phy_drv_unregister(void); void dsi_phy_drv_unregister(void);
/**
* dsi_phy_update_phy_timings() - Update dsi phy timings
* @phy: DSI PHY handle
* @config: DSI Host config parameters
*
* Return: error code.
*/
int dsi_phy_update_phy_timings(struct msm_dsi_phy *phy,
struct dsi_host_config *config);
/**
* dsi_phy_config_dynamic_refresh() - Configure dynamic refresh registers
* @phy: DSI PHY handle
* @delay: pipe delays for dynamic refresh
* @is_master: Boolean to indicate if for master or slave
*/
void dsi_phy_config_dynamic_refresh(struct msm_dsi_phy *phy,
struct dsi_dyn_clk_delay *delay,
bool is_master);
/**
* dsi_phy_dynamic_refresh_trigger() - trigger dynamic refresh
* @phy: DSI PHY handle
* @is_master: Boolean to indicate if for master or slave.
*/
void dsi_phy_dynamic_refresh_trigger(struct msm_dsi_phy *phy, bool is_master);
/**
* dsi_phy_dynamic_refresh_clear() - clear dynamic refresh config
* @phy: DSI PHY handle
*/
void dsi_phy_dynamic_refresh_clear(struct msm_dsi_phy *phy);
/**
* dsi_phy_dyn_refresh_cache_phy_timings - cache the phy timings calculated
* as part of dynamic refresh.
* @phy: DSI PHY Handle.
* @dst: Pointer to cache location.
* @size: Number of phy lane settings.
*/
int dsi_phy_dyn_refresh_cache_phy_timings(struct msm_dsi_phy *phy,
u32 *dst, u32 size);
#endif /* _DSI_PHY_H_ */ #endif /* _DSI_PHY_H_ */

View File

@@ -161,6 +161,43 @@ struct phy_ulps_config_ops {
bool (*is_lanes_in_ulps)(u32 ulps, u32 ulps_lanes); bool (*is_lanes_in_ulps)(u32 ulps, u32 ulps_lanes);
}; };
/**
 * struct phy_dyn_refresh_ops - per-revision hooks for the dynamic refresh
 * (seamless bit clock switch) sequence.
 */
struct phy_dyn_refresh_ops {
	/**
	 * dyn_refresh_helper - helper function to config particular registers
	 * @phy: Pointer to DSI PHY hardware instance.
	 * @offset: bitmask of DYN_REFRESH_* control flags to program; a value
	 *          of 0 clears the dynamic refresh ctrl register, which is the
	 *          last step of the sequence.
	 */
	void (*dyn_refresh_helper)(struct dsi_phy_hw *phy, u32 offset);

	/**
	 * dyn_refresh_config - configure dynamic refresh ctrl registers
	 * @phy: Pointer to DSI PHY hardware instance.
	 * @cfg: Pointer to DSI PHY timings.
	 * @is_master: Boolean to indicate whether for master or slave.
	 */
	void (*dyn_refresh_config)(struct dsi_phy_hw *phy,
			struct dsi_phy_cfg *cfg, bool is_master);

	/**
	 * dyn_refresh_pipe_delay - configure pipe delay registers for dynamic
	 * refresh.
	 * @phy: Pointer to DSI PHY hardware instance.
	 * @delay: structure containing all the delays to be programmed.
	 */
	void (*dyn_refresh_pipe_delay)(struct dsi_phy_hw *phy,
			struct dsi_dyn_clk_delay *delay);

	/**
	 * cache_phy_timings - cache the phy timings calculated as part of
	 * dynamic refresh.
	 * @timings: Pointer to calculated phy timing parameters.
	 * @dst: Pointer to cache location.
	 * @size: Number of phy lane settings.
	 */
	int (*cache_phy_timings)(struct dsi_phy_per_lane_cfgs *timings,
			u32 *dst, u32 size);
};
/** /**
* struct dsi_phy_hw_ops - Operations for DSI PHY hardware. * struct dsi_phy_hw_ops - Operations for DSI PHY hardware.
* @regulator_enable: Enable PHY regulators. * @regulator_enable: Enable PHY regulators.
@@ -220,11 +257,14 @@ struct dsi_phy_hw_ops {
* @mode: Mode information for which timing has to be calculated. * @mode: Mode information for which timing has to be calculated.
* @config: DSI host configuration for this mode. * @config: DSI host configuration for this mode.
* @timing: Timing parameters for each lane which will be returned. * @timing: Timing parameters for each lane which will be returned.
* @use_mode_bit_clk: Boolean to indicate whether reacalculate dsi
* bitclk or use the existing bitclk(for dynamic clk case).
*/ */
int (*calculate_timing_params)(struct dsi_phy_hw *phy, int (*calculate_timing_params)(struct dsi_phy_hw *phy,
struct dsi_mode_info *mode, struct dsi_mode_info *mode,
struct dsi_host_common_cfg *config, struct dsi_host_common_cfg *config,
struct dsi_phy_per_lane_cfgs *timing); struct dsi_phy_per_lane_cfgs *timing,
bool use_mode_bit_clk);
/** /**
* phy_timing_val() - Gets PHY timing values. * phy_timing_val() - Gets PHY timing values.
@@ -265,12 +305,15 @@ struct dsi_phy_hw_ops {
void *timing_ops; void *timing_ops;
struct phy_ulps_config_ops ulps_ops; struct phy_ulps_config_ops ulps_ops;
struct phy_dyn_refresh_ops dyn_refresh_ops;
}; };
/** /**
* struct dsi_phy_hw - DSI phy hardware object specific to an instance * struct dsi_phy_hw - DSI phy hardware object specific to an instance
* @base: VA for the DSI PHY base address. * @base: VA for the DSI PHY base address.
* @length: Length of the DSI PHY register base map. * @length: Length of the DSI PHY register base map.
* @dyn_pll_base: VA for the DSI dynamic refresh base address.
* @dyn_refresh_len: Length of the DSI dynamic refresh register base map.
* @index: Instance ID of the controller. * @index: Instance ID of the controller.
* @version: DSI PHY version. * @version: DSI PHY version.
* @phy_clamp_base: Base address of phy clamp register map. * @phy_clamp_base: Base address of phy clamp register map.
@@ -280,6 +323,8 @@ struct dsi_phy_hw_ops {
struct dsi_phy_hw { struct dsi_phy_hw {
void __iomem *base; void __iomem *base;
u32 length; u32 length;
void __iomem *dyn_pll_base;
u32 dyn_refresh_len;
u32 index; u32 index;
enum dsi_phy_version version; enum dsi_phy_version version;

View File

@@ -58,6 +58,47 @@
#define DSIPHY_LNX_LPRX_CTRL(n) (0x228 + (0x80 * (n))) #define DSIPHY_LNX_LPRX_CTRL(n) (0x228 + (0x80 * (n)))
#define DSIPHY_LNX_TX_DCTRL(n) (0x22C + (0x80 * (n))) #define DSIPHY_LNX_TX_DCTRL(n) (0x22C + (0x80 * (n)))
/* dynamic refresh control registers */
#define DSI_DYN_REFRESH_CTRL (0x000)
#define DSI_DYN_REFRESH_PIPE_DELAY (0x004)
#define DSI_DYN_REFRESH_PIPE_DELAY2 (0x008)
#define DSI_DYN_REFRESH_PLL_DELAY (0x00C)
#define DSI_DYN_REFRESH_STATUS (0x010)
#define DSI_DYN_REFRESH_PLL_CTRL0 (0x014)
#define DSI_DYN_REFRESH_PLL_CTRL1 (0x018)
#define DSI_DYN_REFRESH_PLL_CTRL2 (0x01C)
#define DSI_DYN_REFRESH_PLL_CTRL3 (0x020)
#define DSI_DYN_REFRESH_PLL_CTRL4 (0x024)
#define DSI_DYN_REFRESH_PLL_CTRL5 (0x028)
#define DSI_DYN_REFRESH_PLL_CTRL6 (0x02C)
#define DSI_DYN_REFRESH_PLL_CTRL7 (0x030)
#define DSI_DYN_REFRESH_PLL_CTRL8 (0x034)
#define DSI_DYN_REFRESH_PLL_CTRL9 (0x038)
#define DSI_DYN_REFRESH_PLL_CTRL10 (0x03C)
#define DSI_DYN_REFRESH_PLL_CTRL11 (0x040)
#define DSI_DYN_REFRESH_PLL_CTRL12 (0x044)
#define DSI_DYN_REFRESH_PLL_CTRL13 (0x048)
#define DSI_DYN_REFRESH_PLL_CTRL14 (0x04C)
#define DSI_DYN_REFRESH_PLL_CTRL15 (0x050)
#define DSI_DYN_REFRESH_PLL_CTRL16 (0x054)
#define DSI_DYN_REFRESH_PLL_CTRL17 (0x058)
#define DSI_DYN_REFRESH_PLL_CTRL18 (0x05C)
#define DSI_DYN_REFRESH_PLL_CTRL19 (0x060)
#define DSI_DYN_REFRESH_PLL_CTRL20 (0x064)
#define DSI_DYN_REFRESH_PLL_CTRL21 (0x068)
#define DSI_DYN_REFRESH_PLL_CTRL22 (0x06C)
#define DSI_DYN_REFRESH_PLL_CTRL23 (0x070)
#define DSI_DYN_REFRESH_PLL_CTRL24 (0x074)
#define DSI_DYN_REFRESH_PLL_CTRL25 (0x078)
#define DSI_DYN_REFRESH_PLL_CTRL26 (0x07C)
#define DSI_DYN_REFRESH_PLL_CTRL27 (0x080)
#define DSI_DYN_REFRESH_PLL_CTRL28 (0x084)
#define DSI_DYN_REFRESH_PLL_CTRL29 (0x088)
#define DSI_DYN_REFRESH_PLL_CTRL30 (0x08C)
#define DSI_DYN_REFRESH_PLL_CTRL31 (0x090)
#define DSI_DYN_REFRESH_PLL_UPPER_ADDR (0x094)
#define DSI_DYN_REFRESH_PLL_UPPER_ADDR2 (0x098)
/** /**
* regulator_enable() - enable regulators for DSI PHY * regulator_enable() - enable regulators for DSI PHY
* @phy: Pointer to DSI PHY hardware object. * @phy: Pointer to DSI PHY hardware object.
@@ -462,3 +503,163 @@ int dsi_phy_hw_timing_val_v3_0(struct dsi_phy_per_lane_cfgs *timing_cfg,
timing_cfg->lane_v3[i] = timing_val[i]; timing_cfg->lane_v3[i] = timing_val[i];
return 0; return 0;
} }
/*
 * dsi_phy_hw_v3_0_dyn_refresh_config() - queue the PHY register writes that
 * are replayed during a dynamic (seamless) bit clock switch.
 * @phy:       Pointer to DSI PHY hardware instance.
 * @cfg:       PHY config holding the freshly calculated lane timings.
 * @is_master: true to program the master PHY slots, false for the slave.
 *
 * NOTE(review): each DSI_DYN_REF_REG_W() appears to pack two
 * (PHY register, value) pairs into one DYN_REFRESH_PLL_CTRLn slot that is
 * later replayed in slot order — confirm against the macro definition. The
 * sequence below is therefore order-sensitive; do not reorder the writes.
 */
void dsi_phy_hw_v3_0_dyn_refresh_config(struct dsi_phy_hw *phy,
		struct dsi_phy_cfg *cfg, bool is_master)
{
	u32 reg;

	if (is_master) {
		/* global/regulator control, then timing ctrl 0..11 */
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL9,
				DSIPHY_CMN_GLBL_CTRL, DSIPHY_CMN_VREG_CTRL,
				0x10, 0x59);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL10,
				DSIPHY_CMN_TIMING_CTRL_0, DSIPHY_CMN_TIMING_CTRL_1,
				cfg->timing.lane_v3[0], cfg->timing.lane_v3[1]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL11,
				DSIPHY_CMN_TIMING_CTRL_2, DSIPHY_CMN_TIMING_CTRL_3,
				cfg->timing.lane_v3[2], cfg->timing.lane_v3[3]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL12,
				DSIPHY_CMN_TIMING_CTRL_4, DSIPHY_CMN_TIMING_CTRL_5,
				cfg->timing.lane_v3[4], cfg->timing.lane_v3[5]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL13,
				DSIPHY_CMN_TIMING_CTRL_6, DSIPHY_CMN_TIMING_CTRL_7,
				cfg->timing.lane_v3[6], cfg->timing.lane_v3[7]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL14,
				DSIPHY_CMN_TIMING_CTRL_8, DSIPHY_CMN_TIMING_CTRL_9,
				cfg->timing.lane_v3[8], cfg->timing.lane_v3[9]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL15,
				DSIPHY_CMN_TIMING_CTRL_10, DSIPHY_CMN_TIMING_CTRL_11,
				cfg->timing.lane_v3[10], cfg->timing.lane_v3[11]);
		/* finally re-enable PHY control and all lanes */
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL16,
				DSIPHY_CMN_CTRL_0, DSIPHY_CMN_LANE_CTRL0,
				0x7f, 0x1f);
	} else {
		/*
		 * keep the current clock config but with BIT(5) cleared
		 * (presumably the clock mux select — confirm against the
		 * register spec) as the first replayed write.
		 */
		reg = DSI_R32(phy, DSIPHY_CMN_CLK_CFG0);
		reg &= ~BIT(5);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL0,
				DSIPHY_CMN_CLK_CFG0, DSIPHY_CMN_PLL_CNTRL,
				reg, 0x0);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL1,
				DSIPHY_CMN_RBUF_CTRL, DSIPHY_CMN_GLBL_CTRL,
				0x0, 0x10);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL2,
				DSIPHY_CMN_VREG_CTRL, DSIPHY_CMN_TIMING_CTRL_0,
				0x59, cfg->timing.lane_v3[0]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL3,
				DSIPHY_CMN_TIMING_CTRL_1, DSIPHY_CMN_TIMING_CTRL_2,
				cfg->timing.lane_v3[1], cfg->timing.lane_v3[2]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL4,
				DSIPHY_CMN_TIMING_CTRL_3, DSIPHY_CMN_TIMING_CTRL_4,
				cfg->timing.lane_v3[3], cfg->timing.lane_v3[4]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL5,
				DSIPHY_CMN_TIMING_CTRL_5, DSIPHY_CMN_TIMING_CTRL_6,
				cfg->timing.lane_v3[5], cfg->timing.lane_v3[6]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL6,
				DSIPHY_CMN_TIMING_CTRL_7, DSIPHY_CMN_TIMING_CTRL_8,
				cfg->timing.lane_v3[7], cfg->timing.lane_v3[8]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL7,
				DSIPHY_CMN_TIMING_CTRL_9, DSIPHY_CMN_TIMING_CTRL_10,
				cfg->timing.lane_v3[9], cfg->timing.lane_v3[10]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL8,
				DSIPHY_CMN_TIMING_CTRL_11, DSIPHY_CMN_CTRL_0,
				cfg->timing.lane_v3[11], 0x7f);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL9,
				DSIPHY_CMN_LANE_CTRL0, DSIPHY_CMN_CTRL_2,
				0x1f, 0x40);
		/*
		 * fill with dummy register writes since controller will blindly
		 * send these values to DSI PHY.
		 */
		reg = DSI_DYN_REFRESH_PLL_CTRL11;
		while (reg <= DSI_DYN_REFRESH_PLL_CTRL29) {
			DSI_DYN_REF_REG_W(phy->dyn_pll_base, reg,
					DSIPHY_CMN_LANE_CTRL0, DSIPHY_CMN_CTRL_0,
					0x1f, 0x7f);
			reg += 0x4;
		}

		DSI_GEN_W32(phy->dyn_pll_base,
				DSI_DYN_REFRESH_PLL_UPPER_ADDR, 0);
		DSI_GEN_W32(phy->dyn_pll_base,
				DSI_DYN_REFRESH_PLL_UPPER_ADDR2, 0);
	}

	wmb(); /* make sure all registers are updated */
}
/*
 * dsi_phy_hw_v3_0_dyn_refresh_pipe_delay() - program the pipe and PLL delay
 * registers that time the dynamic refresh switch.
 * @phy:   Pointer to DSI PHY hardware instance.
 * @delay: Pre-computed delay values; the function is a no-op when NULL.
 */
void dsi_phy_hw_v3_0_dyn_refresh_pipe_delay(struct dsi_phy_hw *phy,
		struct dsi_dyn_clk_delay *delay)
{
	if (!delay)
		return;

	DSI_GEN_W32(phy->dyn_pll_base, DSI_DYN_REFRESH_PIPE_DELAY,
			delay->pipe_delay);
	DSI_GEN_W32(phy->dyn_pll_base, DSI_DYN_REFRESH_PIPE_DELAY2,
			delay->pipe_delay2);
	DSI_GEN_W32(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_DELAY,
			delay->pll_delay);
}
/*
 * dsi_phy_hw_v3_0_dyn_refresh_helper() - set or clear dynamic refresh
 * control bits.
 * @phy:    Pointer to DSI PHY hardware instance.
 * @offset: Bitmask of DYN_REFRESH_* flags to set; 0 clears the control
 *          register (the final step of the dynamic refresh sequence).
 *
 * Each flag maps to one bit of DSI_DYN_REFRESH_CTRL and is applied with a
 * read-modify-write so the remaining bits are preserved.
 */
void dsi_phy_hw_v3_0_dyn_refresh_helper(struct dsi_phy_hw *phy, u32 offset)
{
	u32 reg;

	/*
	 * if no offset is mentioned then this means we want to clear
	 * the dynamic refresh ctrl register which is the last step
	 * of dynamic refresh sequence.
	 */
	if (!offset) {
		reg = DSI_GEN_R32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL);
		reg &= ~(BIT(0) | BIT(8));
		DSI_GEN_W32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL, reg);
		wmb(); /* ensure dynamic fps is cleared */
		return;
	}

	if (offset & BIT(DYN_REFRESH_INTF_SEL)) {
		/* interface select is bit 13 of the ctrl register */
		reg = DSI_GEN_R32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL);
		reg |= BIT(13);
		DSI_GEN_W32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL, reg);
	}

	if (offset & BIT(DYN_REFRESH_SYNC_MODE)) {
		/* sync mode (programmed for the slave PHY) is bit 16 */
		reg = DSI_GEN_R32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL);
		reg |= BIT(16);
		DSI_GEN_W32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL, reg);
	}

	if (offset & BIT(DYN_REFRESH_SWI_CTRL)) {
		/* software-interface control is bit 0 */
		reg = DSI_GEN_R32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL);
		reg |= BIT(0);
		DSI_GEN_W32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL, reg);
	}

	if (offset & BIT(DYN_REFRESH_SW_TRIGGER)) {
		/* software trigger is bit 8 */
		reg = DSI_GEN_R32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL);
		reg |= BIT(8);
		DSI_GEN_W32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL, reg);
		wmb(); /* ensure dynamic fps is triggered */
	}
}
/*
 * dsi_phy_hw_v3_0_cache_phy_timings() - copy the calculated v3 lane timing
 * settings into the caller-provided cache.
 * @timings: Calculated per-lane timing parameters.
 * @dst:     Destination cache, one u32 slot per setting.
 * @size:    Number of settings; must equal DSI_PHY_TIMING_V3_SIZE.
 *
 * Return: 0 on success, -EINVAL on bad arguments or size mismatch.
 */
int dsi_phy_hw_v3_0_cache_phy_timings(struct dsi_phy_per_lane_cfgs *timings,
		u32 *dst, u32 size)
{
	u32 idx;

	if (!timings || !dst || !size)
		return -EINVAL;

	if (size != DSI_PHY_TIMING_V3_SIZE) {
		pr_err("size mis-match\n");
		return -EINVAL;
	}

	/* element-wise copy: cache slots are u32, source entries may not be */
	for (idx = 0; idx < size; idx++)
		dst[idx] = timings->lane_v3[idx];

	return 0;
}

View File

@@ -62,7 +62,6 @@
#define DSIPHY_CMN_LANE_STATUS0 0x148 #define DSIPHY_CMN_LANE_STATUS0 0x148
#define DSIPHY_CMN_LANE_STATUS1 0x14C #define DSIPHY_CMN_LANE_STATUS1 0x14C
/* n = 0..3 for data lanes and n = 4 for clock lane */ /* n = 0..3 for data lanes and n = 4 for clock lane */
#define DSIPHY_LNX_CFG0(n) (0x200 + (0x80 * (n))) #define DSIPHY_LNX_CFG0(n) (0x200 + (0x80 * (n)))
#define DSIPHY_LNX_CFG1(n) (0x204 + (0x80 * (n))) #define DSIPHY_LNX_CFG1(n) (0x204 + (0x80 * (n)))
@@ -72,6 +71,47 @@
#define DSIPHY_LNX_LPRX_CTRL(n) (0x214 + (0x80 * (n))) #define DSIPHY_LNX_LPRX_CTRL(n) (0x214 + (0x80 * (n)))
#define DSIPHY_LNX_TX_DCTRL(n) (0x218 + (0x80 * (n))) #define DSIPHY_LNX_TX_DCTRL(n) (0x218 + (0x80 * (n)))
/* dynamic refresh control registers */
#define DSI_DYN_REFRESH_CTRL (0x000)
#define DSI_DYN_REFRESH_PIPE_DELAY (0x004)
#define DSI_DYN_REFRESH_PIPE_DELAY2 (0x008)
#define DSI_DYN_REFRESH_PLL_DELAY (0x00C)
#define DSI_DYN_REFRESH_STATUS (0x010)
#define DSI_DYN_REFRESH_PLL_CTRL0 (0x014)
#define DSI_DYN_REFRESH_PLL_CTRL1 (0x018)
#define DSI_DYN_REFRESH_PLL_CTRL2 (0x01C)
#define DSI_DYN_REFRESH_PLL_CTRL3 (0x020)
#define DSI_DYN_REFRESH_PLL_CTRL4 (0x024)
#define DSI_DYN_REFRESH_PLL_CTRL5 (0x028)
#define DSI_DYN_REFRESH_PLL_CTRL6 (0x02C)
#define DSI_DYN_REFRESH_PLL_CTRL7 (0x030)
#define DSI_DYN_REFRESH_PLL_CTRL8 (0x034)
#define DSI_DYN_REFRESH_PLL_CTRL9 (0x038)
#define DSI_DYN_REFRESH_PLL_CTRL10 (0x03C)
#define DSI_DYN_REFRESH_PLL_CTRL11 (0x040)
#define DSI_DYN_REFRESH_PLL_CTRL12 (0x044)
#define DSI_DYN_REFRESH_PLL_CTRL13 (0x048)
#define DSI_DYN_REFRESH_PLL_CTRL14 (0x04C)
#define DSI_DYN_REFRESH_PLL_CTRL15 (0x050)
#define DSI_DYN_REFRESH_PLL_CTRL16 (0x054)
#define DSI_DYN_REFRESH_PLL_CTRL17 (0x058)
#define DSI_DYN_REFRESH_PLL_CTRL18 (0x05C)
#define DSI_DYN_REFRESH_PLL_CTRL19 (0x060)
#define DSI_DYN_REFRESH_PLL_CTRL20 (0x064)
#define DSI_DYN_REFRESH_PLL_CTRL21 (0x068)
#define DSI_DYN_REFRESH_PLL_CTRL22 (0x06C)
#define DSI_DYN_REFRESH_PLL_CTRL23 (0x070)
#define DSI_DYN_REFRESH_PLL_CTRL24 (0x074)
#define DSI_DYN_REFRESH_PLL_CTRL25 (0x078)
#define DSI_DYN_REFRESH_PLL_CTRL26 (0x07C)
#define DSI_DYN_REFRESH_PLL_CTRL27 (0x080)
#define DSI_DYN_REFRESH_PLL_CTRL28 (0x084)
#define DSI_DYN_REFRESH_PLL_CTRL29 (0x088)
#define DSI_DYN_REFRESH_PLL_CTRL30 (0x08C)
#define DSI_DYN_REFRESH_PLL_CTRL31 (0x090)
#define DSI_DYN_REFRESH_PLL_UPPER_ADDR (0x094)
#define DSI_DYN_REFRESH_PLL_UPPER_ADDR2 (0x098)
static int dsi_phy_hw_v4_0_is_pll_on(struct dsi_phy_hw *phy) static int dsi_phy_hw_v4_0_is_pll_on(struct dsi_phy_hw *phy)
{ {
u32 data = 0; u32 data = 0;
@@ -481,3 +521,165 @@ int dsi_phy_hw_timing_val_v4_0(struct dsi_phy_per_lane_cfgs *timing_cfg,
timing_cfg->lane_v4[i] = timing_val[i]; timing_cfg->lane_v4[i] = timing_val[i];
return 0; return 0;
} }
/*
 * dsi_phy_hw_v4_0_dyn_refresh_config() - queue the PHY register writes that
 * are replayed during a dynamic (seamless) bit clock switch, for PHY v4.
 * @phy:       Pointer to DSI PHY hardware instance.
 * @cfg:       PHY config holding the freshly calculated lane timings
 *             (lane_v4[0..13] on this revision).
 * @is_master: true to program the master PHY slots, false for the slave.
 *
 * NOTE(review): each DSI_DYN_REF_REG_W() appears to pack two
 * (PHY register, value) pairs into one DYN_REFRESH_PLL_CTRLn slot that is
 * later replayed in slot order — confirm against the macro definition. The
 * sequence below is therefore order-sensitive; do not reorder the writes.
 */
void dsi_phy_hw_v4_0_dyn_refresh_config(struct dsi_phy_hw *phy,
		struct dsi_phy_cfg *cfg, bool is_master)
{
	u32 reg;

	if (is_master) {
		/* master: timing ctrl 0..13, then PHY ctrl and lane enable */
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL19,
				DSIPHY_CMN_TIMING_CTRL_0, DSIPHY_CMN_TIMING_CTRL_1,
				cfg->timing.lane_v4[0], cfg->timing.lane_v4[1]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL20,
				DSIPHY_CMN_TIMING_CTRL_2, DSIPHY_CMN_TIMING_CTRL_3,
				cfg->timing.lane_v4[2], cfg->timing.lane_v4[3]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL21,
				DSIPHY_CMN_TIMING_CTRL_4, DSIPHY_CMN_TIMING_CTRL_5,
				cfg->timing.lane_v4[4], cfg->timing.lane_v4[5]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL22,
				DSIPHY_CMN_TIMING_CTRL_6, DSIPHY_CMN_TIMING_CTRL_7,
				cfg->timing.lane_v4[6], cfg->timing.lane_v4[7]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL23,
				DSIPHY_CMN_TIMING_CTRL_8, DSIPHY_CMN_TIMING_CTRL_9,
				cfg->timing.lane_v4[8], cfg->timing.lane_v4[9]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL24,
				DSIPHY_CMN_TIMING_CTRL_10, DSIPHY_CMN_TIMING_CTRL_11,
				cfg->timing.lane_v4[10], cfg->timing.lane_v4[11]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL25,
				DSIPHY_CMN_TIMING_CTRL_12, DSIPHY_CMN_TIMING_CTRL_13,
				cfg->timing.lane_v4[12], cfg->timing.lane_v4[13]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL26,
				DSIPHY_CMN_CTRL_0, DSIPHY_CMN_LANE_CTRL0,
				0x7f, 0x1f);
	} else {
		/*
		 * keep the current clock config but with BIT(5) cleared
		 * (v4 uses CLK_CFG1 where v3 used CLK_CFG0) as the first
		 * replayed write.
		 */
		reg = DSI_R32(phy, DSIPHY_CMN_CLK_CFG1);
		reg &= ~BIT(5);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL0,
				DSIPHY_CMN_CLK_CFG1, DSIPHY_CMN_PLL_CNTRL,
				reg, 0x0);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL1,
				DSIPHY_CMN_RBUF_CTRL, DSIPHY_CMN_TIMING_CTRL_0,
				0x0, cfg->timing.lane_v4[0]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL2,
				DSIPHY_CMN_TIMING_CTRL_1, DSIPHY_CMN_TIMING_CTRL_2,
				cfg->timing.lane_v4[1], cfg->timing.lane_v4[2]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL3,
				DSIPHY_CMN_TIMING_CTRL_3, DSIPHY_CMN_TIMING_CTRL_4,
				cfg->timing.lane_v4[3], cfg->timing.lane_v4[4]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL4,
				DSIPHY_CMN_TIMING_CTRL_5, DSIPHY_CMN_TIMING_CTRL_6,
				cfg->timing.lane_v4[5], cfg->timing.lane_v4[6]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL5,
				DSIPHY_CMN_TIMING_CTRL_7, DSIPHY_CMN_TIMING_CTRL_8,
				cfg->timing.lane_v4[7], cfg->timing.lane_v4[8]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL6,
				DSIPHY_CMN_TIMING_CTRL_9, DSIPHY_CMN_TIMING_CTRL_10,
				cfg->timing.lane_v4[9], cfg->timing.lane_v4[10]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL7,
				DSIPHY_CMN_TIMING_CTRL_11, DSIPHY_CMN_TIMING_CTRL_12,
				cfg->timing.lane_v4[11], cfg->timing.lane_v4[12]);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL8,
				DSIPHY_CMN_TIMING_CTRL_13, DSIPHY_CMN_CTRL_0,
				cfg->timing.lane_v4[13], 0x7f);
		DSI_DYN_REF_REG_W(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_CTRL9,
				DSIPHY_CMN_LANE_CTRL0, DSIPHY_CMN_CTRL_2,
				0x1f, 0x40);
		/*
		 * fill with dummy register writes since controller will blindly
		 * send these values to DSI PHY.
		 */
		reg = DSI_DYN_REFRESH_PLL_CTRL11;
		while (reg <= DSI_DYN_REFRESH_PLL_CTRL29) {
			DSI_DYN_REF_REG_W(phy->dyn_pll_base, reg,
					DSIPHY_CMN_LANE_CTRL0, DSIPHY_CMN_CTRL_0,
					0x1f, 0x7f);
			reg += 0x4;
		}

		DSI_GEN_W32(phy->dyn_pll_base,
				DSI_DYN_REFRESH_PLL_UPPER_ADDR, 0);
		DSI_GEN_W32(phy->dyn_pll_base,
				DSI_DYN_REFRESH_PLL_UPPER_ADDR2, 0);
	}

	wmb(); /* make sure all registers are updated */
}
/*
 * dsi_phy_hw_v4_0_dyn_refresh_pipe_delay() - program the pipe and PLL delay
 * registers that time the dynamic refresh switch.
 * @phy:   Pointer to DSI PHY hardware instance.
 * @delay: Pre-computed delay values; the function is a no-op when NULL.
 */
void dsi_phy_hw_v4_0_dyn_refresh_pipe_delay(struct dsi_phy_hw *phy,
		struct dsi_dyn_clk_delay *delay)
{
	if (!delay)
		return;

	DSI_GEN_W32(phy->dyn_pll_base, DSI_DYN_REFRESH_PIPE_DELAY,
			delay->pipe_delay);
	DSI_GEN_W32(phy->dyn_pll_base, DSI_DYN_REFRESH_PIPE_DELAY2,
			delay->pipe_delay2);
	DSI_GEN_W32(phy->dyn_pll_base, DSI_DYN_REFRESH_PLL_DELAY,
			delay->pll_delay);
}
/*
 * dsi_phy_hw_v4_0_dyn_refresh_helper() - set or clear dynamic refresh
 * control bits.
 * @phy:    Pointer to DSI PHY hardware instance.
 * @offset: Bitmask of DYN_REFRESH_* flags to set; 0 clears the control
 *          register (the final step of the dynamic refresh sequence).
 *
 * Each flag maps to one bit of DSI_DYN_REFRESH_CTRL and is applied with a
 * read-modify-write so the remaining bits are preserved.
 */
void dsi_phy_hw_v4_0_dyn_refresh_helper(struct dsi_phy_hw *phy, u32 offset)
{
	u32 reg;

	/*
	 * if no offset is mentioned then this means we want to clear
	 * the dynamic refresh ctrl register which is the last step
	 * of dynamic refresh sequence.
	 */
	if (!offset) {
		reg = DSI_GEN_R32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL);
		reg &= ~(BIT(0) | BIT(8));
		DSI_GEN_W32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL, reg);
		wmb(); /* ensure dynamic fps is cleared */
		return;
	}

	if (offset & BIT(DYN_REFRESH_INTF_SEL)) {
		/* interface select is bit 13 of the ctrl register */
		reg = DSI_GEN_R32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL);
		reg |= BIT(13);
		DSI_GEN_W32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL, reg);
	}

	if (offset & BIT(DYN_REFRESH_SYNC_MODE)) {
		/* sync mode (programmed for the slave PHY) is bit 16 */
		reg = DSI_GEN_R32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL);
		reg |= BIT(16);
		DSI_GEN_W32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL, reg);
	}

	if (offset & BIT(DYN_REFRESH_SWI_CTRL)) {
		/* software-interface control is bit 0 */
		reg = DSI_GEN_R32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL);
		reg |= BIT(0);
		DSI_GEN_W32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL, reg);
	}

	if (offset & BIT(DYN_REFRESH_SW_TRIGGER)) {
		/* software trigger is bit 8 */
		reg = DSI_GEN_R32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL);
		reg |= BIT(8);
		DSI_GEN_W32(phy->dyn_pll_base, DSI_DYN_REFRESH_CTRL, reg);
		wmb(); /* ensure dynamic fps is triggered */
	}
}
/*
 * dsi_phy_hw_v4_0_cache_phy_timings() - copy the calculated v4 lane timing
 * settings into the caller-provided cache.
 * @timings: Calculated per-lane timing parameters.
 * @dst:     Destination cache, one u32 slot per setting.
 * @size:    Number of settings; must equal DSI_PHY_TIMING_V4_SIZE.
 *
 * Return: 0 on success, -EINVAL on bad arguments or size mismatch.
 */
int dsi_phy_hw_v4_0_cache_phy_timings(struct dsi_phy_per_lane_cfgs *timings,
		u32 *dst, u32 size)
{
	u32 idx;

	if (!timings || !dst || !size)
		return -EINVAL;

	if (size != DSI_PHY_TIMING_V4_SIZE) {
		pr_err("size mis-match\n");
		return -EINVAL;
	}

	/* element-wise copy: cache slots are u32, source entries may not be */
	for (idx = 0; idx < size; idx++)
		dst[idx] = timings->lane_v4[idx];

	return 0;
}

View File

@@ -630,11 +630,14 @@ error:
* @mode: Mode information for which timing has to be calculated. * @mode: Mode information for which timing has to be calculated.
* @config: DSI host configuration for this mode. * @config: DSI host configuration for this mode.
* @timing: Timing parameters for each lane which will be returned. * @timing: Timing parameters for each lane which will be returned.
* @use_mode_bit_clk: Boolean to indicate whether reacalculate dsi
* bit clk or use the existing bit clk(for dynamic clk case).
*/ */
int dsi_phy_hw_calculate_timing_params(struct dsi_phy_hw *phy, int dsi_phy_hw_calculate_timing_params(struct dsi_phy_hw *phy,
struct dsi_mode_info *mode, struct dsi_mode_info *mode,
struct dsi_host_common_cfg *host, struct dsi_host_common_cfg *host,
struct dsi_phy_per_lane_cfgs *timing) struct dsi_phy_per_lane_cfgs *timing,
bool use_mode_bit_clk)
{ {
/* constants */ /* constants */
u32 const esc_clk_mhz = 192; /* TODO: esc clock is hardcoded */ u32 const esc_clk_mhz = 192; /* TODO: esc clock is hardcoded */
@@ -677,6 +680,9 @@ int dsi_phy_hw_calculate_timing_params(struct dsi_phy_hw *phy,
num_of_lanes++; num_of_lanes++;
if (use_mode_bit_clk)
x = mode->clk_rate_hz;
else
x = mult_frac(v_total * h_total, inter_num, num_of_lanes); x = mult_frac(v_total * h_total, inter_num, num_of_lanes);
y = rounddown(x, 1); y = rounddown(x, 1);

View File

@@ -87,7 +87,8 @@ static inline bool _msm_seamless_for_crtc(struct drm_atomic_state *state,
if (msm_is_mode_seamless(&crtc_state->mode) || if (msm_is_mode_seamless(&crtc_state->mode) ||
msm_is_mode_seamless_vrr(&crtc_state->adjusted_mode) || msm_is_mode_seamless_vrr(&crtc_state->adjusted_mode) ||
msm_is_mode_seamless_poms(&crtc_state->adjusted_mode)) msm_is_mode_seamless_poms(&crtc_state->adjusted_mode) ||
msm_is_mode_seamless_dyn_clk(&crtc_state->adjusted_mode))
return true; return true;
if (msm_is_mode_seamless_dms(&crtc_state->adjusted_mode) && !enable) if (msm_is_mode_seamless_dms(&crtc_state->adjusted_mode) && !enable)
@@ -132,6 +133,10 @@ static inline bool _msm_seamless_for_conn(struct drm_connector *connector,
&connector->encoder->crtc->state->adjusted_mode)) &connector->encoder->crtc->state->adjusted_mode))
return true; return true;
if (msm_is_mode_seamless_dyn_clk(
&connector->encoder->crtc->state->adjusted_mode))
return true;
if (msm_is_mode_seamless_dms( if (msm_is_mode_seamless_dms(
&connector->encoder->crtc->state->adjusted_mode)) &connector->encoder->crtc->state->adjusted_mode))
return true; return true;

View File

@@ -40,6 +40,8 @@
#define MSM_MODE_FLAG_SEAMLESS_VRR (1<<3) #define MSM_MODE_FLAG_SEAMLESS_VRR (1<<3)
/* Request to switch the panel mode */ /* Request to switch the panel mode */
#define MSM_MODE_FLAG_SEAMLESS_POMS (1<<4) #define MSM_MODE_FLAG_SEAMLESS_POMS (1<<4)
/* Request to switch the bit clk */
#define MSM_MODE_FLAG_SEAMLESS_DYN_CLK (1<<5)
/* As there are different display controller blocks depending on the /* As there are different display controller blocks depending on the
* snapdragon version, the kms support is split out and the appropriate * snapdragon version, the kms support is split out and the appropriate
@@ -221,6 +223,13 @@ static inline bool msm_is_mode_seamless_poms(
: false; : false;
} }
/* true when @mode requests a seamless dynamic bit clock switch */
static inline bool msm_is_mode_seamless_dyn_clk(
		const struct drm_display_mode *mode)
{
	if (!mode)
		return false;

	return (mode->private_flags & MSM_MODE_FLAG_SEAMLESS_DYN_CLK) != 0;
}
static inline bool msm_needs_vblank_pre_modeset( static inline bool msm_needs_vblank_pre_modeset(
const struct drm_display_mode *mode) const struct drm_display_mode *mode)
{ {

View File

@@ -2092,6 +2092,9 @@ static int sde_connector_populate_mode_info(struct drm_connector *conn,
sde_kms_info_add_keystr(info, "mode_name", mode->name); sde_kms_info_add_keystr(info, "mode_name", mode->name);
sde_kms_info_add_keyint(info, "bit_clk_rate",
mode_info.clk_rate);
topology_idx = (int)sde_rm_get_topology_name( topology_idx = (int)sde_rm_get_topology_name(
mode_info.topology); mode_info.topology);
if (topology_idx < SDE_RM_TOPOLOGY_MAX) { if (topology_idx < SDE_RM_TOPOLOGY_MAX) {

View File

@@ -430,7 +430,8 @@ static bool sde_crtc_mode_fixup(struct drm_crtc *crtc,
SDE_DEBUG("\n"); SDE_DEBUG("\n");
if ((msm_is_mode_seamless(adjusted_mode) || if ((msm_is_mode_seamless(adjusted_mode) ||
msm_is_mode_seamless_vrr(adjusted_mode)) && (msm_is_mode_seamless_vrr(adjusted_mode) ||
msm_is_mode_seamless_dyn_clk(adjusted_mode))) &&
(!crtc->enabled)) { (!crtc->enabled)) {
SDE_ERROR("crtc state prevents seamless transition\n"); SDE_ERROR("crtc state prevents seamless transition\n");
return false; return false;
@@ -4056,7 +4057,9 @@ static void sde_crtc_enable(struct drm_crtc *crtc,
/* return early if crtc is already enabled, do this after UIDLE check */ /* return early if crtc is already enabled, do this after UIDLE check */
if (sde_crtc->enabled) { if (sde_crtc->enabled) {
if (msm_is_mode_seamless_dms(&crtc->state->adjusted_mode)) if (msm_is_mode_seamless_dms(&crtc->state->adjusted_mode) ||
msm_is_mode_seamless_dyn_clk(&crtc->state->adjusted_mode))
SDE_DEBUG("%s extra crtc enable expected during DMS\n", SDE_DEBUG("%s extra crtc enable expected during DMS\n",
sde_crtc->name); sde_crtc->name);
else else

View File

@@ -2751,7 +2751,7 @@ static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
struct sde_rm_hw_iter dsc_iter, pp_iter, qdss_iter; struct sde_rm_hw_iter dsc_iter, pp_iter, qdss_iter;
struct sde_rm_hw_request request_hw; struct sde_rm_hw_request request_hw;
enum sde_intf_mode intf_mode; enum sde_intf_mode intf_mode;
bool is_cmd_mode = false;
int i = 0, ret; int i = 0, ret;
if (!drm_enc) { if (!drm_enc) {
@@ -2767,6 +2767,9 @@ static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
sde_enc = to_sde_encoder_virt(drm_enc); sde_enc = to_sde_encoder_virt(drm_enc);
SDE_DEBUG_ENC(sde_enc, "\n"); SDE_DEBUG_ENC(sde_enc, "\n");
if (sde_encoder_check_curr_mode(drm_enc, MSM_DISPLAY_CMD_MODE))
is_cmd_mode = true;
priv = drm_enc->dev->dev_private; priv = drm_enc->dev->dev_private;
sde_kms = to_sde_kms(priv->kms); sde_kms = to_sde_kms(priv->kms);
connector_list = &sde_kms->dev->mode_config.connector_list; connector_list = &sde_kms->dev->mode_config.connector_list;
@@ -2815,7 +2818,9 @@ static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
sde_encoder_virt_mode_switch(intf_mode, sde_enc, adj_mode); sde_encoder_virt_mode_switch(intf_mode, sde_enc, adj_mode);
/* release resources before seamless mode change */ /* release resources before seamless mode change */
if (msm_is_mode_seamless_dms(adj_mode)) { if (msm_is_mode_seamless_dms(adj_mode) ||
(msm_is_mode_seamless_dyn_clk(adj_mode) &&
is_cmd_mode)) {
/* restore resource state before releasing them */ /* restore resource state before releasing them */
ret = sde_encoder_resource_control(drm_enc, ret = sde_encoder_resource_control(drm_enc,
SDE_ENC_RC_EVENT_PRE_MODESET); SDE_ENC_RC_EVENT_PRE_MODESET);
@@ -2905,7 +2910,9 @@ static void sde_encoder_virt_mode_set(struct drm_encoder *drm_enc,
} }
/* update resources after seamless mode change */ /* update resources after seamless mode change */
if (msm_is_mode_seamless_dms(adj_mode)) if (msm_is_mode_seamless_dms(adj_mode) ||
(msm_is_mode_seamless_dyn_clk(adj_mode) &&
is_cmd_mode))
sde_encoder_resource_control(&sde_enc->base, sde_encoder_resource_control(&sde_enc->base,
SDE_ENC_RC_EVENT_POST_MODESET); SDE_ENC_RC_EVENT_POST_MODESET);
} }
@@ -3190,7 +3197,8 @@ static void sde_encoder_virt_enable(struct drm_encoder *drm_enc)
} }
/* register input handler if not already registered */ /* register input handler if not already registered */
if (sde_enc->input_handler && !msm_is_mode_seamless_dms(cur_mode)) { if (sde_enc->input_handler && !msm_is_mode_seamless_dms(cur_mode) &&
!msm_is_mode_seamless_dyn_clk(cur_mode)) {
ret = _sde_encoder_input_handler_register( ret = _sde_encoder_input_handler_register(
sde_enc->input_handler); sde_enc->input_handler);
if (ret) if (ret)
@@ -3199,7 +3207,8 @@ static void sde_encoder_virt_enable(struct drm_encoder *drm_enc)
} }
if (!(msm_is_mode_seamless_vrr(cur_mode) if (!(msm_is_mode_seamless_vrr(cur_mode)
|| msm_is_mode_seamless_dms(cur_mode))) || msm_is_mode_seamless_dms(cur_mode)
|| msm_is_mode_seamless_dyn_clk(cur_mode)))
kthread_init_delayed_work(&sde_enc->delayed_off_work, kthread_init_delayed_work(&sde_enc->delayed_off_work,
sde_encoder_off_work); sde_encoder_off_work);
@@ -3235,7 +3244,8 @@ static void sde_encoder_virt_enable(struct drm_encoder *drm_enc)
* already. Invoke restore to reconfigure the * already. Invoke restore to reconfigure the
* new mode. * new mode.
*/ */
if (msm_is_mode_seamless_dms(cur_mode) && if ((msm_is_mode_seamless_dms(cur_mode) ||
msm_is_mode_seamless_dyn_clk(cur_mode)) &&
phys->ops.restore) phys->ops.restore)
phys->ops.restore(phys); phys->ops.restore(phys);
else if (phys->ops.enable) else if (phys->ops.enable)
@@ -3248,7 +3258,8 @@ static void sde_encoder_virt_enable(struct drm_encoder *drm_enc)
sde_enc->misr_frame_count); sde_enc->misr_frame_count);
} }
if (msm_is_mode_seamless_dms(cur_mode) && if ((msm_is_mode_seamless_dms(cur_mode) ||
msm_is_mode_seamless_dyn_clk(cur_mode)) &&
sde_enc->cur_master->ops.restore) sde_enc->cur_master->ops.restore)
sde_enc->cur_master->ops.restore(sde_enc->cur_master); sde_enc->cur_master->ops.restore(sde_enc->cur_master);
else if (sde_enc->cur_master->ops.enable) else if (sde_enc->cur_master->ops.enable)

View File

@@ -87,6 +87,9 @@
#define PLL_PLL_INT_GAIN_IFILT_BAND_1 0x15c #define PLL_PLL_INT_GAIN_IFILT_BAND_1 0x15c
#define PLL_PLL_FL_INT_GAIN_PFILT_BAND_1 0x164 #define PLL_PLL_FL_INT_GAIN_PFILT_BAND_1 0x164
#define PLL_FASTLOCK_EN_BAND 0x16c #define PLL_FASTLOCK_EN_BAND 0x16c
#define PLL_FREQ_TUNE_ACCUM_INIT_LOW 0x170
#define PLL_FREQ_TUNE_ACCUM_INIT_MID 0x174
#define PLL_FREQ_TUNE_ACCUM_INIT_HIGH 0x178
#define PLL_FREQ_TUNE_ACCUM_INIT_MUX 0x17c #define PLL_FREQ_TUNE_ACCUM_INIT_MUX 0x17c
#define PLL_PLL_LOCK_OVERRIDE 0x180 #define PLL_PLL_LOCK_OVERRIDE 0x180
#define PLL_PLL_LOCK_DELAY 0x184 #define PLL_PLL_LOCK_DELAY 0x184
@@ -104,6 +107,7 @@
#define PHY_CMN_RBUF_CTRL 0x01c #define PHY_CMN_RBUF_CTRL 0x01c
#define PHY_CMN_PLL_CNTRL 0x038 #define PHY_CMN_PLL_CNTRL 0x038
#define PHY_CMN_CTRL_0 0x024 #define PHY_CMN_CTRL_0 0x024
#define PHY_CMN_CTRL_2 0x02c
/* Bit definition of SSC control registers */ /* Bit definition of SSC control registers */
#define SSC_CENTER BIT(0) #define SSC_CENTER BIT(0)
@@ -115,6 +119,43 @@
#define SSC_START BIT(6) #define SSC_START BIT(6)
#define SSC_START_MUX BIT(7) #define SSC_START_MUX BIT(7)
/* Dynamic Refresh Control Registers */
#define DSI_DYNAMIC_REFRESH_PLL_CTRL0 (0x014)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL1 (0x018)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL2 (0x01C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL3 (0x020)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL4 (0x024)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL5 (0x028)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL6 (0x02C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL7 (0x030)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL8 (0x034)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL9 (0x038)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL10 (0x03C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL11 (0x040)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL12 (0x044)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL13 (0x048)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL14 (0x04C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL15 (0x050)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL16 (0x054)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL17 (0x058)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL18 (0x05C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL19 (0x060)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL20 (0x064)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL21 (0x068)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL22 (0x06C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL23 (0x070)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL24 (0x074)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL25 (0x078)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL26 (0x07C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL27 (0x080)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL28 (0x084)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL29 (0x088)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL30 (0x08C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL31 (0x090)
#define DSI_DYNAMIC_REFRESH_PLL_UPPER_ADDR (0x094)
#define DSI_DYNAMIC_REFRESH_PLL_UPPER_ADDR2 (0x098)
#define DSI_PHY_TO_PLL_OFFSET (0x600)
enum { enum {
DSI_PLL_0, DSI_PLL_0,
DSI_PLL_1, DSI_PLL_1,
@@ -621,6 +662,7 @@ static int vco_10nm_set_rate(struct clk_hw *hw, unsigned long rate,
rsc->vco_current_rate = rate; rsc->vco_current_rate = rate;
rsc->vco_ref_clk_rate = vco->ref_clk_rate; rsc->vco_ref_clk_rate = vco->ref_clk_rate;
rsc->dfps_trigger = false;
rc = mdss_pll_resource_enable(rsc, true); rc = mdss_pll_resource_enable(rsc, true);
if (rc) { if (rc) {
@@ -651,6 +693,237 @@ static int vco_10nm_set_rate(struct clk_hw *hw, unsigned long rate,
return 0; return 0;
} }
static int dsi_pll_read_stored_trim_codes(struct mdss_pll_resources *pll_res,
unsigned long vco_clk_rate)
{
int i;
bool found = false;
if (!pll_res->dfps)
return -EINVAL;
for (i = 0; i < pll_res->dfps->vco_rate_cnt; i++) {
struct dfps_codes_info *codes_info =
&pll_res->dfps->codes_dfps[i];
pr_debug("valid=%d vco_rate=%d, code %d %d %d\n",
codes_info->is_valid, codes_info->clk_rate,
codes_info->pll_codes.pll_codes_1,
codes_info->pll_codes.pll_codes_2,
codes_info->pll_codes.pll_codes_3);
if (vco_clk_rate != codes_info->clk_rate &&
codes_info->is_valid)
continue;
pll_res->cache_pll_trim_codes[0] =
codes_info->pll_codes.pll_codes_1;
pll_res->cache_pll_trim_codes[1] =
codes_info->pll_codes.pll_codes_2;
pll_res->cache_pll_trim_codes[2] =
codes_info->pll_codes.pll_codes_3;
found = true;
break;
}
if (!found)
return -EINVAL;
pr_debug("trim_code_0=0x%x trim_code_1=0x%x trim_code_2=0x%x\n",
pll_res->cache_pll_trim_codes[0],
pll_res->cache_pll_trim_codes[1],
pll_res->cache_pll_trim_codes[2]);
return 0;
}
static void shadow_dsi_pll_dynamic_refresh_10nm(struct dsi_pll_10nm *pll,
struct mdss_pll_resources *rsc)
{
u32 data;
u32 offset = DSI_PHY_TO_PLL_OFFSET;
u32 upper_addr = 0;
struct dsi_pll_regs *reg = &pll->reg_setup;
data = MDSS_PLL_REG_R(rsc->phy_base, PHY_CMN_CLK_CFG1);
data &= ~BIT(5);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL0,
PHY_CMN_CLK_CFG1, PHY_CMN_PLL_CNTRL, data, 0);
upper_addr |= (upper_8_bit(PHY_CMN_CLK_CFG1) << 0);
upper_addr |= (upper_8_bit(PHY_CMN_PLL_CNTRL) << 1);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL1,
PHY_CMN_RBUF_CTRL,
(PLL_DECIMAL_DIV_START_1 + offset),
0, reg->decimal_div_start);
upper_addr |= (upper_8_bit(PHY_CMN_RBUF_CTRL) << 2);
upper_addr |= (upper_8_bit(PLL_DECIMAL_DIV_START_1 + offset) << 3);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL2,
(PLL_FRAC_DIV_START_LOW_1 + offset),
(PLL_FRAC_DIV_START_MID_1 + offset),
reg->frac_div_start_low, reg->frac_div_start_mid);
upper_addr |= (upper_8_bit(PLL_FRAC_DIV_START_LOW_1 + offset) << 4);
upper_addr |= (upper_8_bit(PLL_FRAC_DIV_START_MID_1 + offset) << 5);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL3,
(PLL_FRAC_DIV_START_HIGH_1 + offset),
(PLL_PLL_PROP_GAIN_RATE_1 + offset),
reg->frac_div_start_high, reg->pll_prop_gain_rate);
upper_addr |= (upper_8_bit(PLL_FRAC_DIV_START_HIGH_1 + offset) << 6);
upper_addr |= (upper_8_bit(PLL_PLL_PROP_GAIN_RATE_1 + offset) << 7);
data = MDSS_PLL_REG_R(rsc->pll_base, PLL_PLL_OUTDIV_RATE) & 0x03;
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL4,
(PLL_PLL_OUTDIV_RATE + offset),
(PLL_FREQ_TUNE_ACCUM_INIT_LOW + offset),
data, 0);
upper_addr |= (upper_8_bit(PLL_PLL_OUTDIV_RATE + offset) << 8);
upper_addr |= (upper_8_bit(PLL_FREQ_TUNE_ACCUM_INIT_LOW + offset) << 9);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL5,
(PLL_FREQ_TUNE_ACCUM_INIT_MID + offset),
(PLL_FREQ_TUNE_ACCUM_INIT_HIGH + offset),
rsc->cache_pll_trim_codes[1],
rsc->cache_pll_trim_codes[0]);
upper_addr |=
(upper_8_bit(PLL_FREQ_TUNE_ACCUM_INIT_MID + offset) << 10);
upper_addr |=
(upper_8_bit(PLL_FREQ_TUNE_ACCUM_INIT_HIGH + offset) << 11);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL6,
(PLL_FREQ_TUNE_ACCUM_INIT_MUX + offset),
(PLL_PLL_BAND_SET_RATE_1 + offset),
0x07, rsc->cache_pll_trim_codes[2]);
upper_addr |=
(upper_8_bit(PLL_FREQ_TUNE_ACCUM_INIT_MUX + offset) << 12);
upper_addr |= (upper_8_bit(PLL_PLL_BAND_SET_RATE_1 + offset) << 13);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL7,
(PLL_CALIBRATION_SETTINGS + offset),
(PLL_BAND_SEL_CAL_SETTINGS + offset), 0x44, 0x3a);
upper_addr |= (upper_8_bit(PLL_CALIBRATION_SETTINGS + offset) << 14);
upper_addr |= (upper_8_bit(PLL_BAND_SEL_CAL_SETTINGS + offset) << 15);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL8,
(PLL_PLL_LOCKDET_RATE_1 + offset),
(PLL_PLL_LOCK_DELAY + offset), 0x10, 0x06);
upper_addr |= (upper_8_bit(PLL_PLL_LOCKDET_RATE_1 + offset) << 16);
upper_addr |= (upper_8_bit(PLL_PLL_LOCK_DELAY + offset) << 17);
data = MDSS_PLL_REG_R(rsc->phy_base, PHY_CMN_CLK_CFG0);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL17,
PHY_CMN_CTRL_2, PHY_CMN_CLK_CFG0, 0x40, data);
if (rsc->slave)
MDSS_DYN_PLL_REG_W(rsc->slave->dyn_pll_base,
DSI_DYNAMIC_REFRESH_PLL_CTRL10,
PHY_CMN_CLK_CFG0, PHY_CMN_CTRL_0,
data, 0x7f);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL18,
PHY_CMN_PLL_CNTRL, PHY_CMN_PLL_CNTRL, 0x01, 0x01);
/* Dummy register writes */
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL19,
PHY_CMN_PLL_CNTRL, PHY_CMN_PLL_CNTRL, 0x01, 0x01);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL20,
PHY_CMN_PLL_CNTRL, PHY_CMN_PLL_CNTRL, 0x01, 0x01);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL21,
PHY_CMN_PLL_CNTRL, PHY_CMN_PLL_CNTRL, 0x01, 0x01);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL22,
PHY_CMN_PLL_CNTRL, PHY_CMN_PLL_CNTRL, 0x01, 0x01);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL23,
PHY_CMN_PLL_CNTRL, PHY_CMN_PLL_CNTRL, 0x01, 0x01);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL24,
PHY_CMN_PLL_CNTRL, PHY_CMN_PLL_CNTRL, 0x01, 0x01);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL25,
PHY_CMN_PLL_CNTRL, PHY_CMN_PLL_CNTRL, 0x01, 0x01);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL26,
PHY_CMN_PLL_CNTRL, PHY_CMN_PLL_CNTRL, 0x01, 0x01);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL27,
PHY_CMN_PLL_CNTRL, PHY_CMN_PLL_CNTRL, 0x01, 0x01);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL28,
PHY_CMN_PLL_CNTRL, PHY_CMN_PLL_CNTRL, 0x01, 0x01);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL29,
PHY_CMN_PLL_CNTRL, PHY_CMN_PLL_CNTRL, 0x01, 0x01);
/* Registers to configure after PLL enable delay */
data = MDSS_PLL_REG_R(rsc->phy_base, PHY_CMN_CLK_CFG1) | BIT(5);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL30,
PHY_CMN_CLK_CFG1, PHY_CMN_RBUF_CTRL, data, 0x01);
MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL31,
PHY_CMN_CLK_CFG1, PHY_CMN_CLK_CFG1, data, data);
if (rsc->slave) {
data = MDSS_PLL_REG_R(rsc->slave->phy_base, PHY_CMN_CLK_CFG1) |
BIT(5);
MDSS_DYN_PLL_REG_W(rsc->slave->dyn_pll_base,
DSI_DYNAMIC_REFRESH_PLL_CTRL30,
PHY_CMN_CLK_CFG1, PHY_CMN_RBUF_CTRL,
data, 0x01);
MDSS_DYN_PLL_REG_W(rsc->slave->dyn_pll_base,
DSI_DYNAMIC_REFRESH_PLL_CTRL31,
PHY_CMN_CLK_CFG1, PHY_CMN_CLK_CFG1,
data, data);
}
MDSS_PLL_REG_W(rsc->dyn_pll_base,
DSI_DYNAMIC_REFRESH_PLL_UPPER_ADDR, upper_addr);
MDSS_PLL_REG_W(rsc->dyn_pll_base,
DSI_DYNAMIC_REFRESH_PLL_UPPER_ADDR2, 0);
wmb(); /* commit register writes */
}
static int shadow_vco_10nm_set_rate(struct clk_hw *hw, unsigned long rate,
unsigned long parent_rate)
{
int rc;
struct dsi_pll_10nm *pll;
struct dsi_pll_vco_clk *vco = to_vco_clk_hw(hw);
struct mdss_pll_resources *rsc = vco->priv;
if (!rsc) {
pr_err("pll resource not found\n");
return -EINVAL;
}
pll = rsc->priv;
if (!pll) {
pr_err("pll configuration not found\n");
return -EINVAL;
}
rc = dsi_pll_read_stored_trim_codes(rsc, rate);
if (rc) {
pr_err("cannot find pll codes rate=%ld\n", rate);
return -EINVAL;
}
pr_debug("ndx=%d, rate=%lu\n", rsc->index, rate);
rsc->vco_current_rate = rate;
rsc->vco_ref_clk_rate = vco->ref_clk_rate;
rc = mdss_pll_resource_enable(rsc, true);
if (rc) {
pr_err("failed to enable mdss dsi pll(%d), rc=%d\n",
rsc->index, rc);
return rc;
}
dsi_pll_setup_config(pll, rsc);
dsi_pll_calc_dec_frac(pll, rsc);
/* program dynamic refresh control registers */
shadow_dsi_pll_dynamic_refresh_10nm(pll, rsc);
/* update cached vco rate */
rsc->vco_cached_rate = rate;
rsc->dfps_trigger = true;
mdss_pll_resource_enable(rsc, false);
return 0;
}
static int dsi_pll_10nm_lock_status(struct mdss_pll_resources *pll) static int dsi_pll_10nm_lock_status(struct mdss_pll_resources *pll)
{ {
int rc; int rc;
@@ -716,7 +989,7 @@ static int dsi_pll_enable(struct dsi_pll_vco_clk *vco)
phy_reg_update_bits_sub(rsc, PHY_CMN_CLK_CFG1, 0x03, rsc->cached_cfg1); phy_reg_update_bits_sub(rsc, PHY_CMN_CLK_CFG1, 0x03, rsc->cached_cfg1);
if (rsc->slave) if (rsc->slave)
phy_reg_update_bits_sub(rsc->slave, PHY_CMN_CLK_CFG1, phy_reg_update_bits_sub(rsc->slave, PHY_CMN_CLK_CFG1,
0x03, rsc->cached_cfg1); 0x03, rsc->slave->cached_cfg1);
wmb(); /* ensure dsiclk_sel is always programmed before pll start */ wmb(); /* ensure dsiclk_sel is always programmed before pll start */
/* Start PLL */ /* Start PLL */
@@ -766,6 +1039,7 @@ static void dsi_pll_disable(struct dsi_pll_vco_clk *vco)
} }
rsc->handoff_resources = false; rsc->handoff_resources = false;
rsc->dfps_trigger = false;
pr_debug("stop PLL (%d)\n", rsc->index); pr_debug("stop PLL (%d)\n", rsc->index);
@@ -817,8 +1091,10 @@ static void vco_10nm_unprepare(struct clk_hw *hw)
/* /*
* During unprepare in continuous splash use case we want driver * During unprepare in continuous splash use case we want driver
* to pick all dividers instead of retaining bootloader configurations. * to pick all dividers instead of retaining bootloader configurations.
* Also handle use cases where dynamic refresh triggered before
* first suspend/resume.
*/ */
if (!pll->handoff_resources) { if (!pll->handoff_resources || pll->dfps_trigger) {
pll->cached_cfg0 = MDSS_PLL_REG_R(pll->phy_base, pll->cached_cfg0 = MDSS_PLL_REG_R(pll->phy_base,
PHY_CMN_CLK_CFG0); PHY_CMN_CLK_CFG0);
pll->cached_outdiv = MDSS_PLL_REG_R(pll->pll_base, pll->cached_outdiv = MDSS_PLL_REG_R(pll->pll_base,
@@ -826,7 +1102,7 @@ static void vco_10nm_unprepare(struct clk_hw *hw)
pr_debug("cfg0=%d,cfg1=%d, outdiv=%d\n", pll->cached_cfg0, pr_debug("cfg0=%d,cfg1=%d, outdiv=%d\n", pll->cached_cfg0,
pll->cached_cfg1, pll->cached_outdiv); pll->cached_cfg1, pll->cached_outdiv);
pll->vco_cached_rate = clk_hw_get_rate(hw); pll->vco_cached_rate = clk_get_rate(hw->clk);
} }
/* /*
@@ -836,9 +1112,15 @@ static void vco_10nm_unprepare(struct clk_hw *hw)
* does not change.For such usecases, we need to ensure that the cached * does not change.For such usecases, we need to ensure that the cached
* value is programmed prior to PLL being locked * value is programmed prior to PLL being locked
*/ */
if (pll->handoff_resources) if (pll->handoff_resources) {
pll->cached_cfg1 = MDSS_PLL_REG_R(pll->phy_base, pll->cached_cfg1 = MDSS_PLL_REG_R(pll->phy_base,
PHY_CMN_CLK_CFG1); PHY_CMN_CLK_CFG1);
if (pll->slave)
pll->slave->cached_cfg1 =
MDSS_PLL_REG_R(pll->slave->phy_base,
PHY_CMN_CLK_CFG1);
}
dsi_pll_disable(vco); dsi_pll_disable(vco);
mdss_pll_resource_enable(pll, false); mdss_pll_resource_enable(pll, false);
} }
@@ -866,7 +1148,7 @@ static int vco_10nm_prepare(struct clk_hw *hw)
} }
if ((pll->vco_cached_rate != 0) && if ((pll->vco_cached_rate != 0) &&
(pll->vco_cached_rate == clk_hw_get_rate(hw))) { (pll->vco_cached_rate == clk_get_rate(hw->clk))) {
rc = hw->init->ops->set_rate(hw, pll->vco_cached_rate, rc = hw->init->ops->set_rate(hw, pll->vco_cached_rate,
pll->vco_cached_rate); pll->vco_cached_rate);
if (rc) { if (rc) {
@@ -879,6 +1161,9 @@ static int vco_10nm_prepare(struct clk_hw *hw)
pll->cached_cfg1); pll->cached_cfg1);
MDSS_PLL_REG_W(pll->phy_base, PHY_CMN_CLK_CFG0, MDSS_PLL_REG_W(pll->phy_base, PHY_CMN_CLK_CFG0,
pll->cached_cfg0); pll->cached_cfg0);
if (pll->slave)
MDSS_PLL_REG_W(pll->slave->phy_base, PHY_CMN_CLK_CFG0,
pll->cached_cfg0);
MDSS_PLL_REG_W(pll->pll_base, PLL_PLL_OUTDIV_RATE, MDSS_PLL_REG_W(pll->pll_base, PLL_PLL_OUTDIV_RATE,
pll->cached_outdiv); pll->cached_outdiv);
} }
@@ -1014,6 +1299,14 @@ static void pixel_clk_set_div_sub(struct mdss_pll_resources *pll, int div)
reg_val &= ~0xF0; reg_val &= ~0xF0;
reg_val |= (div << 4); reg_val |= (div << 4);
MDSS_PLL_REG_W(pll->phy_base, PHY_CMN_CLK_CFG0, reg_val); MDSS_PLL_REG_W(pll->phy_base, PHY_CMN_CLK_CFG0, reg_val);
/*
* cache the current parent index for cases where parent
* is not changing but rate is changing. In that case
* clock framework won't call parent_set and hence dsiclk_sel
* bit won't be programmed. e.g. dfps update use case.
*/
pll->cached_cfg0 = reg_val;
} }
static int pixel_clk_set_div(void *context, unsigned int reg, unsigned int div) static int pixel_clk_set_div(void *context, unsigned int reg, unsigned int div)
@@ -1151,6 +1444,12 @@ static const struct clk_ops clk_ops_vco_10nm = {
.unprepare = vco_10nm_unprepare, .unprepare = vco_10nm_unprepare,
}; };
static const struct clk_ops clk_ops_shadow_vco_10nm = {
.recalc_rate = vco_10nm_recalc_rate,
.set_rate = shadow_vco_10nm_set_rate,
.round_rate = vco_10nm_round_rate,
};
static struct regmap_bus mdss_mux_regmap_bus = { static struct regmap_bus mdss_mux_regmap_bus = {
.reg_write = mdss_set_mux_sel, .reg_write = mdss_set_mux_sel,
.reg_read = mdss_get_mux_sel, .reg_read = mdss_get_mux_sel,
@@ -1225,6 +1524,19 @@ static struct dsi_pll_vco_clk dsi0pll_vco_clk = {
}, },
}; };
static struct dsi_pll_vco_clk dsi0pll_shadow_vco_clk = {
.ref_clk_rate = 19200000UL,
.min_rate = 1000000000UL,
.max_rate = 3500000000UL,
.hw.init = &(struct clk_init_data){
.name = "dsi0pll_shadow_vco_clk",
.parent_names = (const char *[]){"bi_tcxo"},
.num_parents = 1,
.ops = &clk_ops_shadow_vco_10nm,
.flags = CLK_GET_RATE_NOCACHE,
},
};
static struct dsi_pll_vco_clk dsi1pll_vco_clk = { static struct dsi_pll_vco_clk dsi1pll_vco_clk = {
.ref_clk_rate = 19200000UL, .ref_clk_rate = 19200000UL,
.min_rate = 1000000000UL, .min_rate = 1000000000UL,
@@ -1238,6 +1550,19 @@ static struct dsi_pll_vco_clk dsi1pll_vco_clk = {
}, },
}; };
static struct dsi_pll_vco_clk dsi1pll_shadow_vco_clk = {
.ref_clk_rate = 19200000UL,
.min_rate = 1000000000UL,
.max_rate = 3500000000UL,
.hw.init = &(struct clk_init_data){
.name = "dsi1pll_shadow_vco_clk",
.parent_names = (const char *[]){"bi_tcxo"},
.num_parents = 1,
.ops = &clk_ops_shadow_vco_10nm,
.flags = CLK_GET_RATE_NOCACHE,
},
};
static struct clk_regmap_div dsi0pll_pll_out_div = { static struct clk_regmap_div dsi0pll_pll_out_div = {
.reg = PLL_PLL_OUTDIV_RATE, .reg = PLL_PLL_OUTDIV_RATE,
.shift = 0, .shift = 0,
@@ -1254,6 +1579,23 @@ static struct clk_regmap_div dsi0pll_pll_out_div = {
}, },
}; };
static struct clk_regmap_div dsi0pll_shadow_pll_out_div = {
.reg = PLL_PLL_OUTDIV_RATE,
.shift = 0,
.width = 2,
.flags = CLK_DIVIDER_POWER_OF_TWO,
.clkr = {
.hw.init = &(struct clk_init_data){
.name = "dsi0pll_shadow_pll_out_div",
.parent_names = (const char *[]){
"dsi0pll_shadow_vco_clk"},
.num_parents = 1,
.flags = (CLK_GET_RATE_NOCACHE | CLK_SET_RATE_PARENT),
.ops = &clk_regmap_div_ops,
},
},
};
static struct clk_regmap_div dsi1pll_pll_out_div = { static struct clk_regmap_div dsi1pll_pll_out_div = {
.reg = PLL_PLL_OUTDIV_RATE, .reg = PLL_PLL_OUTDIV_RATE,
.shift = 0, .shift = 0,
@@ -1270,6 +1612,23 @@ static struct clk_regmap_div dsi1pll_pll_out_div = {
}, },
}; };
static struct clk_regmap_div dsi1pll_shadow_pll_out_div = {
.reg = PLL_PLL_OUTDIV_RATE,
.shift = 0,
.width = 2,
.flags = CLK_DIVIDER_POWER_OF_TWO,
.clkr = {
.hw.init = &(struct clk_init_data){
.name = "dsi1pll_shadow_pll_out_div",
.parent_names = (const char *[]){
"dsi1pll_shadow_vco_clk"},
.num_parents = 1,
.flags = (CLK_GET_RATE_NOCACHE | CLK_SET_RATE_PARENT),
.ops = &clk_regmap_div_ops,
},
},
};
static struct clk_regmap_div dsi0pll_bitclk_src = { static struct clk_regmap_div dsi0pll_bitclk_src = {
.shift = 0, .shift = 0,
.width = 4, .width = 4,
@@ -1284,6 +1643,21 @@ static struct clk_regmap_div dsi0pll_bitclk_src = {
}, },
}; };
static struct clk_regmap_div dsi0pll_shadow_bitclk_src = {
.shift = 0,
.width = 4,
.clkr = {
.hw.init = &(struct clk_init_data){
.name = "dsi0pll_shadow_bitclk_src",
.parent_names = (const char *[]){
"dsi0pll_shadow_pll_out_div"},
.num_parents = 1,
.flags = (CLK_GET_RATE_NOCACHE | CLK_SET_RATE_PARENT),
.ops = &clk_regmap_div_ops,
},
},
};
static struct clk_regmap_div dsi1pll_bitclk_src = { static struct clk_regmap_div dsi1pll_bitclk_src = {
.shift = 0, .shift = 0,
.width = 4, .width = 4,
@@ -1298,6 +1672,21 @@ static struct clk_regmap_div dsi1pll_bitclk_src = {
}, },
}; };
static struct clk_regmap_div dsi1pll_shadow_bitclk_src = {
.shift = 0,
.width = 4,
.clkr = {
.hw.init = &(struct clk_init_data){
.name = "dsi1pll_shadow_bitclk_src",
.parent_names = (const char *[]){
"dsi1pll_shadow_pll_out_div"},
.num_parents = 1,
.flags = (CLK_GET_RATE_NOCACHE | CLK_SET_RATE_PARENT),
.ops = &clk_regmap_div_ops,
},
},
};
static struct clk_fixed_factor dsi0pll_post_vco_div = { static struct clk_fixed_factor dsi0pll_post_vco_div = {
.div = 4, .div = 4,
.mult = 1, .mult = 1,
@@ -1305,7 +1694,19 @@ static struct clk_fixed_factor dsi0pll_post_vco_div = {
.name = "dsi0pll_post_vco_div", .name = "dsi0pll_post_vco_div",
.parent_names = (const char *[]){"dsi0pll_pll_out_div"}, .parent_names = (const char *[]){"dsi0pll_pll_out_div"},
.num_parents = 1, .num_parents = 1,
.flags = (CLK_GET_RATE_NOCACHE | CLK_SET_RATE_PARENT), .flags = CLK_GET_RATE_NOCACHE,
.ops = &clk_fixed_factor_ops,
},
};
static struct clk_fixed_factor dsi0pll_shadow_post_vco_div = {
.div = 4,
.mult = 1,
.hw.init = &(struct clk_init_data){
.name = "dsi0pll_shadow_post_vco_div",
.parent_names = (const char *[]){"dsi0pll_shadow_pll_out_div"},
.num_parents = 1,
.flags = CLK_GET_RATE_NOCACHE,
.ops = &clk_fixed_factor_ops, .ops = &clk_fixed_factor_ops,
}, },
}; };
@@ -1317,7 +1718,19 @@ static struct clk_fixed_factor dsi1pll_post_vco_div = {
.name = "dsi1pll_post_vco_div", .name = "dsi1pll_post_vco_div",
.parent_names = (const char *[]){"dsi1pll_pll_out_div"}, .parent_names = (const char *[]){"dsi1pll_pll_out_div"},
.num_parents = 1, .num_parents = 1,
.flags = (CLK_GET_RATE_NOCACHE | CLK_SET_RATE_PARENT), .flags = CLK_GET_RATE_NOCACHE,
.ops = &clk_fixed_factor_ops,
},
};
static struct clk_fixed_factor dsi1pll_shadow_post_vco_div = {
.div = 4,
.mult = 1,
.hw.init = &(struct clk_init_data){
.name = "dsi1pll_shadow_post_vco_div",
.parent_names = (const char *[]){"dsi1pll_shadow_pll_out_div"},
.num_parents = 1,
.flags = CLK_GET_RATE_NOCACHE,
.ops = &clk_fixed_factor_ops, .ops = &clk_fixed_factor_ops,
}, },
}; };
@@ -1334,6 +1747,18 @@ static struct clk_fixed_factor dsi0pll_byteclk_src = {
}, },
}; };
static struct clk_fixed_factor dsi0pll_shadow_byteclk_src = {
.div = 8,
.mult = 1,
.hw.init = &(struct clk_init_data){
.name = "dsi0pll_shadow_byteclk_src",
.parent_names = (const char *[]){"dsi0pll_shadow_bitclk_src"},
.num_parents = 1,
.flags = (CLK_GET_RATE_NOCACHE | CLK_SET_RATE_PARENT),
.ops = &clk_fixed_factor_ops,
},
};
static struct clk_fixed_factor dsi1pll_byteclk_src = { static struct clk_fixed_factor dsi1pll_byteclk_src = {
.div = 8, .div = 8,
.mult = 1, .mult = 1,
@@ -1346,6 +1771,18 @@ static struct clk_fixed_factor dsi1pll_byteclk_src = {
}, },
}; };
static struct clk_fixed_factor dsi1pll_shadow_byteclk_src = {
.div = 8,
.mult = 1,
.hw.init = &(struct clk_init_data){
.name = "dsi1pll_shadow_byteclk_src",
.parent_names = (const char *[]){"dsi1pll_shadow_bitclk_src"},
.num_parents = 1,
.flags = (CLK_GET_RATE_NOCACHE | CLK_SET_RATE_PARENT),
.ops = &clk_fixed_factor_ops,
},
};
static struct clk_fixed_factor dsi0pll_post_bit_div = { static struct clk_fixed_factor dsi0pll_post_bit_div = {
.div = 2, .div = 2,
.mult = 1, .mult = 1,
@@ -1358,6 +1795,18 @@ static struct clk_fixed_factor dsi0pll_post_bit_div = {
}, },
}; };
static struct clk_fixed_factor dsi0pll_shadow_post_bit_div = {
.div = 2,
.mult = 1,
.hw.init = &(struct clk_init_data){
.name = "dsi0pll_shadow_post_bit_div",
.parent_names = (const char *[]){"dsi0pll_shadow_bitclk_src"},
.num_parents = 1,
.flags = CLK_GET_RATE_NOCACHE,
.ops = &clk_fixed_factor_ops,
},
};
static struct clk_fixed_factor dsi1pll_post_bit_div = { static struct clk_fixed_factor dsi1pll_post_bit_div = {
.div = 2, .div = 2,
.mult = 1, .mult = 1,
@@ -1370,15 +1819,29 @@ static struct clk_fixed_factor dsi1pll_post_bit_div = {
}, },
}; };
static struct clk_fixed_factor dsi1pll_shadow_post_bit_div = {
.div = 2,
.mult = 1,
.hw.init = &(struct clk_init_data){
.name = "dsi1pll_shadow_post_bit_div",
.parent_names = (const char *[]){"dsi1pll_shadow_bitclk_src"},
.num_parents = 1,
.flags = CLK_GET_RATE_NOCACHE,
.ops = &clk_fixed_factor_ops,
},
};
static struct clk_regmap_mux dsi0pll_byteclk_mux = { static struct clk_regmap_mux dsi0pll_byteclk_mux = {
.shift = 0, .shift = 0,
.width = 1, .width = 1,
.clkr = { .clkr = {
.hw.init = &(struct clk_init_data){ .hw.init = &(struct clk_init_data){
.name = "dsi0_phy_pll_out_byteclk", .name = "dsi0_phy_pll_out_byteclk",
.parent_names = (const char *[]){"dsi0pll_byteclk_src"}, .parent_names = (const char *[]){"dsi0pll_byteclk_src",
.num_parents = 1, "dsi0pll_shadow_byteclk_src"},
.flags = (CLK_GET_RATE_NOCACHE | CLK_SET_RATE_PARENT), .num_parents = 2,
.flags = (CLK_GET_RATE_NOCACHE | CLK_SET_RATE_PARENT |
CLK_SET_RATE_NO_REPARENT),
.ops = &clk_regmap_mux_closest_ops, .ops = &clk_regmap_mux_closest_ops,
}, },
}, },
@@ -1390,9 +1853,11 @@ static struct clk_regmap_mux dsi1pll_byteclk_mux = {
.clkr = { .clkr = {
.hw.init = &(struct clk_init_data){ .hw.init = &(struct clk_init_data){
.name = "dsi1_phy_pll_out_byteclk", .name = "dsi1_phy_pll_out_byteclk",
.parent_names = (const char *[]){"dsi1pll_byteclk_src"}, .parent_names = (const char *[]){"dsi1pll_byteclk_src",
.num_parents = 1, "dsi1pll_shadow_byteclk_src"},
.flags = (CLK_GET_RATE_NOCACHE | CLK_SET_RATE_PARENT), .num_parents = 2,
.flags = (CLK_GET_RATE_NOCACHE | CLK_SET_RATE_PARENT |
CLK_SET_RATE_NO_REPARENT),
.ops = &clk_regmap_mux_closest_ops, .ops = &clk_regmap_mux_closest_ops,
}, },
}, },
@@ -1416,6 +1881,25 @@ static struct clk_regmap_mux dsi0pll_pclk_src_mux = {
}, },
}; };
static struct clk_regmap_mux dsi0pll_shadow_pclk_src_mux = {
.reg = PHY_CMN_CLK_CFG1,
.shift = 0,
.width = 2,
.clkr = {
.hw.init = &(struct clk_init_data){
.name = "dsi0pll_shadow_pclk_src_mux",
.parent_names = (const char *[]){
"dsi0pll_shadow_bitclk_src",
"dsi0pll_shadow_post_bit_div",
"dsi0pll_shadow_pll_out_div",
"dsi0pll_shadow_post_vco_div"},
.num_parents = 4,
.flags = CLK_GET_RATE_NOCACHE,
.ops = &clk_regmap_mux_closest_ops,
},
},
};
static struct clk_regmap_mux dsi1pll_pclk_src_mux = { static struct clk_regmap_mux dsi1pll_pclk_src_mux = {
.reg = PHY_CMN_CLK_CFG1, .reg = PHY_CMN_CLK_CFG1,
.shift = 0, .shift = 0,
@@ -1434,6 +1918,25 @@ static struct clk_regmap_mux dsi1pll_pclk_src_mux = {
}, },
}; };
static struct clk_regmap_mux dsi1pll_shadow_pclk_src_mux = {
.reg = PHY_CMN_CLK_CFG1,
.shift = 0,
.width = 2,
.clkr = {
.hw.init = &(struct clk_init_data){
.name = "dsi1pll_shadow_pclk_src_mux",
.parent_names = (const char *[]){
"dsi1pll_shadow_bitclk_src",
"dsi1pll_shadow_post_bit_div",
"dsi1pll_shadow_pll_out_div",
"dsi1pll_shadow_post_vco_div"},
.num_parents = 4,
.flags = CLK_GET_RATE_NOCACHE,
.ops = &clk_regmap_mux_closest_ops,
},
},
};
static struct clk_regmap_div dsi0pll_pclk_src = { static struct clk_regmap_div dsi0pll_pclk_src = {
.shift = 0, .shift = 0,
.width = 4, .width = 4,
@@ -1449,6 +1952,21 @@ static struct clk_regmap_div dsi0pll_pclk_src = {
}, },
}; };
static struct clk_regmap_div dsi0pll_shadow_pclk_src = {
.shift = 0,
.width = 4,
.clkr = {
.hw.init = &(struct clk_init_data){
.name = "dsi0pll_shadow_pclk_src",
.parent_names = (const char *[]){
"dsi0pll_shadow_pclk_src_mux"},
.num_parents = 1,
.flags = (CLK_GET_RATE_NOCACHE | CLK_SET_RATE_PARENT),
.ops = &clk_regmap_div_ops,
},
},
};
static struct clk_regmap_div dsi1pll_pclk_src = { static struct clk_regmap_div dsi1pll_pclk_src = {
.shift = 0, .shift = 0,
.width = 4, .width = 4,
@@ -1464,15 +1982,32 @@ static struct clk_regmap_div dsi1pll_pclk_src = {
}, },
}; };
/*
 * Shadow pixel-clock divider for DSI1 (4-bit divider).
 * Counterpart of dsi0pll_shadow_pclk_src; regmap attached at registration.
 */
static struct clk_regmap_div dsi1pll_shadow_pclk_src = {
	.shift = 0,
	.width = 4,
	.clkr = {
		.hw.init = &(struct clk_init_data){
			.name = "dsi1pll_shadow_pclk_src",
			.parent_names = (const char *[]){
				"dsi1pll_shadow_pclk_src_mux"},
			.num_parents = 1,
			.flags = (CLK_GET_RATE_NOCACHE | CLK_SET_RATE_PARENT),
			.ops = &clk_regmap_div_ops,
		},
	},
};
static struct clk_regmap_mux dsi0pll_pclk_mux = { static struct clk_regmap_mux dsi0pll_pclk_mux = {
.shift = 0, .shift = 0,
.width = 1, .width = 1,
.clkr = { .clkr = {
.hw.init = &(struct clk_init_data){ .hw.init = &(struct clk_init_data){
.name = "dsi0_phy_pll_out_dsiclk", .name = "dsi0_phy_pll_out_dsiclk",
.parent_names = (const char *[]){"dsi0pll_pclk_src"}, .parent_names = (const char *[]){"dsi0pll_pclk_src",
.num_parents = 1, "dsi0pll_shadow_pclk_src"},
.flags = (CLK_GET_RATE_NOCACHE | CLK_SET_RATE_PARENT), .num_parents = 2,
.flags = (CLK_GET_RATE_NOCACHE | CLK_SET_RATE_PARENT |
CLK_SET_RATE_NO_REPARENT),
.ops = &clk_regmap_mux_closest_ops, .ops = &clk_regmap_mux_closest_ops,
}, },
}, },
@@ -1484,9 +2019,11 @@ static struct clk_regmap_mux dsi1pll_pclk_mux = {
.clkr = { .clkr = {
.hw.init = &(struct clk_init_data){ .hw.init = &(struct clk_init_data){
.name = "dsi1_phy_pll_out_dsiclk", .name = "dsi1_phy_pll_out_dsiclk",
.parent_names = (const char *[]){"dsi1pll_pclk_src"}, .parent_names = (const char *[]){"dsi1pll_pclk_src",
.num_parents = 1, "dsi1pll_shadow_pclk_src"},
.flags = (CLK_GET_RATE_NOCACHE | CLK_SET_RATE_PARENT), .num_parents = 2,
.flags = (CLK_GET_RATE_NOCACHE | CLK_SET_RATE_PARENT |
CLK_SET_RATE_NO_REPARENT),
.ops = &clk_regmap_mux_closest_ops, .ops = &clk_regmap_mux_closest_ops,
}, },
}, },
@@ -1503,6 +2040,14 @@ static struct clk_hw *mdss_dsi_pllcc_10nm[] = {
[PCLK_SRC_MUX_0_CLK] = &dsi0pll_pclk_src_mux.clkr.hw, [PCLK_SRC_MUX_0_CLK] = &dsi0pll_pclk_src_mux.clkr.hw,
[PCLK_SRC_0_CLK] = &dsi0pll_pclk_src.clkr.hw, [PCLK_SRC_0_CLK] = &dsi0pll_pclk_src.clkr.hw,
[PCLK_MUX_0_CLK] = &dsi0pll_pclk_mux.clkr.hw, [PCLK_MUX_0_CLK] = &dsi0pll_pclk_mux.clkr.hw,
[SHADOW_VCO_CLK_0] = &dsi0pll_shadow_vco_clk.hw,
[SHADOW_PLL_OUT_DIV_0_CLK] = &dsi0pll_shadow_pll_out_div.clkr.hw,
[SHADOW_BITCLK_SRC_0_CLK] = &dsi0pll_shadow_bitclk_src.clkr.hw,
[SHADOW_BYTECLK_SRC_0_CLK] = &dsi0pll_shadow_byteclk_src.hw,
[SHADOW_POST_BIT_DIV_0_CLK] = &dsi0pll_shadow_post_bit_div.hw,
[SHADOW_POST_VCO_DIV_0_CLK] = &dsi0pll_shadow_post_vco_div.hw,
[SHADOW_PCLK_SRC_MUX_0_CLK] = &dsi0pll_shadow_pclk_src_mux.clkr.hw,
[SHADOW_PCLK_SRC_0_CLK] = &dsi0pll_shadow_pclk_src.clkr.hw,
[VCO_CLK_1] = &dsi1pll_vco_clk.hw, [VCO_CLK_1] = &dsi1pll_vco_clk.hw,
[PLL_OUT_DIV_1_CLK] = &dsi1pll_pll_out_div.clkr.hw, [PLL_OUT_DIV_1_CLK] = &dsi1pll_pll_out_div.clkr.hw,
[BITCLK_SRC_1_CLK] = &dsi1pll_bitclk_src.clkr.hw, [BITCLK_SRC_1_CLK] = &dsi1pll_bitclk_src.clkr.hw,
@@ -1513,6 +2058,14 @@ static struct clk_hw *mdss_dsi_pllcc_10nm[] = {
[PCLK_SRC_MUX_1_CLK] = &dsi1pll_pclk_src_mux.clkr.hw, [PCLK_SRC_MUX_1_CLK] = &dsi1pll_pclk_src_mux.clkr.hw,
[PCLK_SRC_1_CLK] = &dsi1pll_pclk_src.clkr.hw, [PCLK_SRC_1_CLK] = &dsi1pll_pclk_src.clkr.hw,
[PCLK_MUX_1_CLK] = &dsi1pll_pclk_mux.clkr.hw, [PCLK_MUX_1_CLK] = &dsi1pll_pclk_mux.clkr.hw,
[SHADOW_VCO_CLK_1] = &dsi1pll_shadow_vco_clk.hw,
[SHADOW_PLL_OUT_DIV_1_CLK] = &dsi1pll_shadow_pll_out_div.clkr.hw,
[SHADOW_BITCLK_SRC_1_CLK] = &dsi1pll_shadow_bitclk_src.clkr.hw,
[SHADOW_BYTECLK_SRC_1_CLK] = &dsi1pll_shadow_byteclk_src.hw,
[SHADOW_POST_BIT_DIV_1_CLK] = &dsi1pll_shadow_post_bit_div.hw,
[SHADOW_POST_VCO_DIV_1_CLK] = &dsi1pll_shadow_post_vco_div.hw,
[SHADOW_PCLK_SRC_MUX_1_CLK] = &dsi1pll_shadow_pclk_src_mux.clkr.hw,
[SHADOW_PCLK_SRC_1_CLK] = &dsi1pll_shadow_pclk_src.clkr.hw,
}; };
int dsi_pll_clock_register_10nm(struct platform_device *pdev, int dsi_pll_clock_register_10nm(struct platform_device *pdev,
@@ -1549,18 +2102,20 @@ int dsi_pll_clock_register_10nm(struct platform_device *pdev,
/* Establish client data */ /* Establish client data */
if (ndx == 0) { if (ndx == 0) {
rmap = devm_regmap_init(&pdev->dev, &pll_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &pll_regmap_bus,
pll_res, &dsi_pll_10nm_config); pll_res, &dsi_pll_10nm_config);
dsi0pll_pll_out_div.clkr.regmap = rmap; dsi0pll_pll_out_div.clkr.regmap = rmap;
dsi0pll_shadow_pll_out_div.clkr.regmap = rmap;
rmap = devm_regmap_init(&pdev->dev, &bitclk_src_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &bitclk_src_regmap_bus,
pll_res, &dsi_pll_10nm_config); pll_res, &dsi_pll_10nm_config);
dsi0pll_bitclk_src.clkr.regmap = rmap; dsi0pll_bitclk_src.clkr.regmap = rmap;
dsi0pll_shadow_bitclk_src.clkr.regmap = rmap;
rmap = devm_regmap_init(&pdev->dev, &pclk_src_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &pclk_src_regmap_bus,
pll_res, &dsi_pll_10nm_config); pll_res, &dsi_pll_10nm_config);
dsi0pll_pclk_src.clkr.regmap = rmap; dsi0pll_pclk_src.clkr.regmap = rmap;
dsi0pll_shadow_pclk_src.clkr.regmap = rmap;
rmap = devm_regmap_init(&pdev->dev, &mdss_mux_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &mdss_mux_regmap_bus,
pll_res, &dsi_pll_10nm_config); pll_res, &dsi_pll_10nm_config);
@@ -1569,12 +2124,16 @@ int dsi_pll_clock_register_10nm(struct platform_device *pdev,
rmap = devm_regmap_init(&pdev->dev, &pclk_src_mux_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &pclk_src_mux_regmap_bus,
pll_res, &dsi_pll_10nm_config); pll_res, &dsi_pll_10nm_config);
dsi0pll_pclk_src_mux.clkr.regmap = rmap; dsi0pll_pclk_src_mux.clkr.regmap = rmap;
dsi0pll_shadow_pclk_src_mux.clkr.regmap = rmap;
rmap = devm_regmap_init(&pdev->dev, &mdss_mux_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &mdss_mux_regmap_bus,
pll_res, &dsi_pll_10nm_config); pll_res, &dsi_pll_10nm_config);
dsi0pll_byteclk_mux.clkr.regmap = rmap; dsi0pll_byteclk_mux.clkr.regmap = rmap;
dsi0pll_vco_clk.priv = pll_res; dsi0pll_vco_clk.priv = pll_res;
for (i = VCO_CLK_0; i <= PCLK_MUX_0_CLK; i++) { dsi0pll_shadow_vco_clk.priv = pll_res;
for (i = VCO_CLK_0; i <= SHADOW_PCLK_SRC_0_CLK; i++) {
clk = devm_clk_register(&pdev->dev, clk = devm_clk_register(&pdev->dev,
mdss_dsi_pllcc_10nm[i]); mdss_dsi_pllcc_10nm[i]);
if (IS_ERR(clk)) { if (IS_ERR(clk)) {
@@ -1589,20 +2148,21 @@ int dsi_pll_clock_register_10nm(struct platform_device *pdev,
rc = of_clk_add_provider(pdev->dev.of_node, rc = of_clk_add_provider(pdev->dev.of_node,
of_clk_src_onecell_get, clk_data); of_clk_src_onecell_get, clk_data);
} else { } else {
rmap = devm_regmap_init(&pdev->dev, &pll_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &pll_regmap_bus,
pll_res, &dsi_pll_10nm_config); pll_res, &dsi_pll_10nm_config);
dsi1pll_pll_out_div.clkr.regmap = rmap; dsi1pll_pll_out_div.clkr.regmap = rmap;
dsi1pll_shadow_pll_out_div.clkr.regmap = rmap;
rmap = devm_regmap_init(&pdev->dev, &bitclk_src_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &bitclk_src_regmap_bus,
pll_res, &dsi_pll_10nm_config); pll_res, &dsi_pll_10nm_config);
dsi1pll_bitclk_src.clkr.regmap = rmap; dsi1pll_bitclk_src.clkr.regmap = rmap;
dsi1pll_shadow_bitclk_src.clkr.regmap = rmap;
rmap = devm_regmap_init(&pdev->dev, &pclk_src_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &pclk_src_regmap_bus,
pll_res, &dsi_pll_10nm_config); pll_res, &dsi_pll_10nm_config);
dsi1pll_pclk_src.clkr.regmap = rmap; dsi1pll_pclk_src.clkr.regmap = rmap;
dsi1pll_shadow_pclk_src.clkr.regmap = rmap;
rmap = devm_regmap_init(&pdev->dev, &mdss_mux_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &mdss_mux_regmap_bus,
pll_res, &dsi_pll_10nm_config); pll_res, &dsi_pll_10nm_config);
@@ -1611,12 +2171,16 @@ int dsi_pll_clock_register_10nm(struct platform_device *pdev,
rmap = devm_regmap_init(&pdev->dev, &pclk_src_mux_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &pclk_src_mux_regmap_bus,
pll_res, &dsi_pll_10nm_config); pll_res, &dsi_pll_10nm_config);
dsi1pll_pclk_src_mux.clkr.regmap = rmap; dsi1pll_pclk_src_mux.clkr.regmap = rmap;
dsi1pll_shadow_pclk_src_mux.clkr.regmap = rmap;
rmap = devm_regmap_init(&pdev->dev, &mdss_mux_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &mdss_mux_regmap_bus,
pll_res, &dsi_pll_10nm_config); pll_res, &dsi_pll_10nm_config);
dsi1pll_byteclk_mux.clkr.regmap = rmap; dsi1pll_byteclk_mux.clkr.regmap = rmap;
dsi1pll_vco_clk.priv = pll_res;
for (i = VCO_CLK_1; i <= PCLK_MUX_1_CLK; i++) { dsi1pll_vco_clk.priv = pll_res;
dsi1pll_shadow_vco_clk.priv = pll_res;
for (i = VCO_CLK_1; i <= SHADOW_PCLK_SRC_1_CLK; i++) {
clk = devm_clk_register(&pdev->dev, clk = devm_clk_register(&pdev->dev,
mdss_dsi_pllcc_10nm[i]); mdss_dsi_pllcc_10nm[i]);
if (IS_ERR(clk)) { if (IS_ERR(clk)) {

View File

@@ -34,12 +34,12 @@ static int mdss_pll_read_stored_trim_codes(
goto end_read; goto end_read;
} }
for (i = 0; i < dsi_pll_res->dfps->panel_dfps.frame_rate_cnt; i++) { for (i = 0; i < dsi_pll_res->dfps->vco_rate_cnt; i++) {
struct dfps_codes_info *codes_info = struct dfps_codes_info *codes_info =
&dsi_pll_res->dfps->codes_dfps[i]; &dsi_pll_res->dfps->codes_dfps[i];
pr_debug("valid=%d frame_rate=%d, vco_rate=%d, code %d %d\n", pr_debug("valid=%d frame_rate=%d, code %d %d\n",
codes_info->is_valid, codes_info->frame_rate, codes_info->is_valid,
codes_info->clk_rate, codes_info->pll_codes.pll_codes_1, codes_info->clk_rate, codes_info->pll_codes.pll_codes_1,
codes_info->pll_codes.pll_codes_2); codes_info->pll_codes.pll_codes_2);

View File

@@ -11,7 +11,7 @@
#include <linux/delay.h> #include <linux/delay.h>
#include "dsi_pll.h" #include "dsi_pll.h"
#include "pll_drv.h" #include "pll_drv.h"
#include <dt-bindings/clock/mdss-10nm-pll-clk.h> #include <dt-bindings/clock/mdss-7nm-pll-clk.h>
#define VCO_DELAY_USEC 1 #define VCO_DELAY_USEC 1
@@ -182,6 +182,7 @@
#define PHY_CMN_CLK_CFG1 0x014 #define PHY_CMN_CLK_CFG1 0x014
#define PHY_CMN_RBUF_CTRL 0x01C #define PHY_CMN_RBUF_CTRL 0x01C
#define PHY_CMN_CTRL_0 0x024 #define PHY_CMN_CTRL_0 0x024
#define PHY_CMN_CTRL_2 0x02C
#define PHY_CMN_CTRL_3 0x030 #define PHY_CMN_CTRL_3 0x030
#define PHY_CMN_PLL_CNTRL 0x03C #define PHY_CMN_PLL_CNTRL 0x03C
#define PHY_CMN_GLBL_DIGTOP_SPARE4 0x128 #define PHY_CMN_GLBL_DIGTOP_SPARE4 0x128
@@ -196,6 +197,43 @@
#define SSC_START BIT(6) #define SSC_START BIT(6)
#define SSC_START_MUX BIT(7) #define SSC_START_MUX BIT(7)
/* Dynamic Refresh Control Registers */
/*
 * Each CTRLn register appears to hold one (address, data) pair per half that
 * hardware replays during a dynamic refresh; the two UPPER_ADDR registers
 * collect bit 8 of every programmed address (see upper_8_bit() usage in
 * shadow_dsi_pll_dynamic_refresh_7nm()). PLL-space targets are reached
 * through the PHY aperture at DSI_PHY_TO_PLL_OFFSET.
 */
#define DSI_DYNAMIC_REFRESH_PLL_CTRL0		(0x014)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL1		(0x018)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL2		(0x01C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL3		(0x020)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL4		(0x024)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL5		(0x028)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL6		(0x02C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL7		(0x030)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL8		(0x034)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL9		(0x038)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL10		(0x03C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL11		(0x040)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL12		(0x044)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL13		(0x048)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL14		(0x04C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL15		(0x050)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL16		(0x054)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL17		(0x058)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL18		(0x05C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL19		(0x060)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL20		(0x064)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL21		(0x068)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL22		(0x06C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL23		(0x070)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL24		(0x074)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL25		(0x078)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL26		(0x07C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL27		(0x080)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL28		(0x084)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL29		(0x088)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL30		(0x08C)
#define DSI_DYNAMIC_REFRESH_PLL_CTRL31		(0x090)
#define DSI_DYNAMIC_REFRESH_PLL_UPPER_ADDR	(0x094)
#define DSI_DYNAMIC_REFRESH_PLL_UPPER_ADDR2	(0x098)

/* PLL registers are addressed through the PHY block at this offset */
#define DSI_PHY_TO_PLL_OFFSET	(0x500)
enum { enum {
DSI_PLL_0, DSI_PLL_0,
DSI_PLL_1, DSI_PLL_1,
@@ -554,6 +592,7 @@ static void dsi_pll_calc_dec_frac(struct dsi_pll_7nm *pll,
regs->frac_div_start_low = (frac & 0xff); regs->frac_div_start_low = (frac & 0xff);
regs->frac_div_start_mid = (frac & 0xff00) >> 8; regs->frac_div_start_mid = (frac & 0xff00) >> 8;
regs->frac_div_start_high = (frac & 0x30000) >> 16; regs->frac_div_start_high = (frac & 0x30000) >> 16;
regs->pll_prop_gain_rate = 10;
} }
static void dsi_pll_calc_ssc(struct dsi_pll_7nm *pll, static void dsi_pll_calc_ssc(struct dsi_pll_7nm *pll,
@@ -889,6 +928,7 @@ static int vco_7nm_set_rate(struct clk_hw *hw, unsigned long rate,
rsc->vco_current_rate = rate; rsc->vco_current_rate = rate;
rsc->vco_ref_clk_rate = vco->ref_clk_rate; rsc->vco_ref_clk_rate = vco->ref_clk_rate;
rsc->dfps_trigger = false;
rc = mdss_pll_resource_enable(rsc, true); rc = mdss_pll_resource_enable(rsc, true);
if (rc) { if (rc) {
@@ -919,6 +959,289 @@ static int vco_7nm_set_rate(struct clk_hw *hw, unsigned long rate,
return 0; return 0;
} }
/*
 * dsi_pll_read_stored_trim_codes() - look up cached PLL trim codes for a rate
 * @pll_res:	PLL resource holding the DFPS code table (pll_res->dfps)
 * @vco_clk_rate: target VCO rate to match against stored entries
 *
 * Scans the DFPS code table for a *valid* entry whose clk_rate equals
 * @vco_clk_rate and copies its three trim codes into
 * pll_res->cache_pll_trim_codes[].
 *
 * Return: 0 on success, -EINVAL if there is no DFPS data or no matching
 * valid entry.
 */
static int dsi_pll_read_stored_trim_codes(struct mdss_pll_resources *pll_res,
		unsigned long vco_clk_rate)
{
	int i;
	bool found = false;

	if (!pll_res->dfps)
		return -EINVAL;

	for (i = 0; i < pll_res->dfps->vco_rate_cnt; i++) {
		struct dfps_codes_info *codes_info =
			&pll_res->dfps->codes_dfps[i];

		pr_debug("valid=%d vco_rate=%d, code %d %d %d\n",
			codes_info->is_valid, codes_info->clk_rate,
			codes_info->pll_codes.pll_codes_1,
			codes_info->pll_codes.pll_codes_2,
			codes_info->pll_codes.pll_codes_3);

		/*
		 * Skip entries that are invalid OR whose rate does not match.
		 * The previous condition (rate mismatch && is_valid) never
		 * skipped an invalid entry, so the first invalid entry was
		 * accepted regardless of its rate.
		 */
		if (!codes_info->is_valid ||
		    vco_clk_rate != codes_info->clk_rate)
			continue;

		pll_res->cache_pll_trim_codes[0] =
			codes_info->pll_codes.pll_codes_1;
		pll_res->cache_pll_trim_codes[1] =
			codes_info->pll_codes.pll_codes_2;
		pll_res->cache_pll_trim_codes[2] =
			codes_info->pll_codes.pll_codes_3;
		found = true;
		break;
	}

	if (!found)
		return -EINVAL;

	pr_debug("trim_code_0=0x%x trim_code_1=0x%x trim_code_2=0x%x\n",
		pll_res->cache_pll_trim_codes[0],
		pll_res->cache_pll_trim_codes[1],
		pll_res->cache_pll_trim_codes[2]);

	return 0;
}
/*
 * shadow_dsi_pll_dynamic_refresh_7nm() - stage PLL/PHY register writes for
 * a seamless dynamic clock switch (DFPS).
 *
 * Loads the DSI_DYNAMIC_REFRESH_PLL_CTRLn registers with (address, value)
 * pairs that hardware replays when dynamic refresh triggers, then records
 * bit 8 of each programmed address in the two UPPER_ADDR registers.
 * PLL-space addresses are reached through the PHY aperture, hence the
 * DSI_PHY_TO_PLL_OFFSET added to every PLL_* target.
 */
static void shadow_dsi_pll_dynamic_refresh_7nm(struct dsi_pll_7nm *pll,
		struct mdss_pll_resources *rsc)
{
	u32 data;
	u32 offset = DSI_PHY_TO_PLL_OFFSET;
	u32 upper_addr = 0;
	u32 upper_addr2 = 0;
	struct dsi_pll_regs *reg = &pll->reg_setup;

	/*
	 * Clear BIT(5) of CLK_CFG1 for the staged write.
	 * NOTE(review): presumably the shadow-clock select bit — it is set
	 * again in the CTRL30/31 pairs below; confirm against the PHY spec.
	 */
	data = MDSS_PLL_REG_R(rsc->phy_base, PHY_CMN_CLK_CFG1);
	data &= ~BIT(5);
	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL0,
			PHY_CMN_CLK_CFG1, PHY_CMN_PLL_CNTRL, data, 0);
	upper_addr |= (upper_8_bit(PHY_CMN_CLK_CFG1) << 0);
	upper_addr |= (upper_8_bit(PHY_CMN_PLL_CNTRL) << 1);

	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL1,
			PHY_CMN_RBUF_CTRL,
			(PLL_CORE_INPUT_OVERRIDE + offset),
			0, 0x12);
	upper_addr |= (upper_8_bit(PHY_CMN_RBUF_CTRL) << 2);
	upper_addr |= (upper_8_bit(PLL_CORE_INPUT_OVERRIDE + offset) << 3);

	/* Decimal/fractional dividers computed by dsi_pll_calc_dec_frac() */
	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL2,
			(PLL_DECIMAL_DIV_START_1 + offset),
			(PLL_FRAC_DIV_START_LOW_1 + offset),
			reg->decimal_div_start, reg->frac_div_start_low);
	upper_addr |= (upper_8_bit(PLL_DECIMAL_DIV_START_1 + offset) << 4);
	upper_addr |= (upper_8_bit(PLL_FRAC_DIV_START_LOW_1 + offset) << 5);

	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL3,
			(PLL_FRAC_DIV_START_MID_1 + offset),
			(PLL_FRAC_DIV_START_HIGH_1 + offset),
			reg->frac_div_start_mid, reg->frac_div_start_high);
	upper_addr |= (upper_8_bit(PLL_FRAC_DIV_START_MID_1 + offset) << 6);
	upper_addr |= (upper_8_bit(PLL_FRAC_DIV_START_HIGH_1 + offset) << 7);

	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL4,
			(PLL_SYSTEM_MUXES + offset),
			(PLL_PLL_LOCKDET_RATE_1 + offset),
			0xc0, 0x40);
	upper_addr |= (upper_8_bit(PLL_SYSTEM_MUXES + offset) << 8);
	upper_addr |= (upper_8_bit(PLL_PLL_LOCKDET_RATE_1 + offset) << 9);

	/* Preserve the currently programmed 2-bit outdiv */
	data = MDSS_PLL_REG_R(rsc->pll_base, PLL_PLL_OUTDIV_RATE) & 0x03;
	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL5,
			(PLL_PLL_OUTDIV_RATE + offset),
			(PLL_PLL_LOCK_DELAY + offset),
			data, 0x06);
	upper_addr |= (upper_8_bit(PLL_PLL_OUTDIV_RATE + offset) << 10);
	upper_addr |= (upper_8_bit(PLL_PLL_LOCK_DELAY + offset) << 11);

	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL6,
			(PLL_CMODE_1 + offset),
			(PLL_CLOCK_INVERTERS_1 + offset),
			0x10, reg->pll_clock_inverters);
	upper_addr |=
		(upper_8_bit(PLL_CMODE_1 + offset) << 12);
	upper_addr |= (upper_8_bit(PLL_CLOCK_INVERTERS_1 + offset) << 13);

	/* Keep the live VCO_CONFIG_1 value across the switch */
	data = MDSS_PLL_REG_R(rsc->pll_base, PLL_VCO_CONFIG_1);
	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL7,
			(PLL_ANALOG_CONTROLS_FIVE_1 + offset),
			(PLL_VCO_CONFIG_1 + offset),
			0x01, data);
	upper_addr |= (upper_8_bit(PLL_ANALOG_CONTROLS_FIVE_1 + offset) << 14);
	upper_addr |= (upper_8_bit(PLL_VCO_CONFIG_1 + offset) << 15);

	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL8,
			(PLL_ANALOG_CONTROLS_FIVE + offset),
			(PLL_DSM_DIVIDER + offset), 0x01, 0);
	upper_addr |= (upper_8_bit(PLL_ANALOG_CONTROLS_FIVE + offset) << 16);
	upper_addr |= (upper_8_bit(PLL_DSM_DIVIDER + offset) << 17);

	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL9,
			(PLL_FEEDBACK_DIVIDER + offset),
			(PLL_CALIBRATION_SETTINGS + offset), 0x4E, 0x40);
	upper_addr |= (upper_8_bit(PLL_FEEDBACK_DIVIDER + offset) << 18);
	upper_addr |= (upper_8_bit(PLL_CALIBRATION_SETTINGS + offset) << 19);

	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL10,
			(PLL_BAND_SEL_CAL_SETTINGS_THREE + offset),
			(PLL_FREQ_DETECT_SETTINGS_ONE + offset), 0xBA, 0x0C);
	upper_addr |= (upper_8_bit(PLL_BAND_SEL_CAL_SETTINGS_THREE + offset)
			<< 20);
	upper_addr |= (upper_8_bit(PLL_FREQ_DETECT_SETTINGS_ONE + offset)
			<< 21);

	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL11,
			(PLL_OUTDIV + offset),
			(PLL_CORE_OVERRIDE + offset), 0, 0);
	upper_addr |= (upper_8_bit(PLL_OUTDIV + offset) << 22);
	upper_addr |= (upper_8_bit(PLL_CORE_OVERRIDE + offset) << 23);

	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL12,
			(PLL_PLL_DIGITAL_TIMERS_TWO + offset),
			(PLL_PLL_PROP_GAIN_RATE_1 + offset),
			0x08, reg->pll_prop_gain_rate);
	upper_addr |= (upper_8_bit(PLL_PLL_DIGITAL_TIMERS_TWO + offset) << 24);
	upper_addr |= (upper_8_bit(PLL_PLL_PROP_GAIN_RATE_1 + offset) << 25);

	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL13,
			(PLL_PLL_BAND_SEL_RATE_1 + offset),
			(PLL_PLL_INT_GAIN_IFILT_BAND_1 + offset),
			0xC0, 0x82);
	upper_addr |= (upper_8_bit(PLL_PLL_BAND_SEL_RATE_1 + offset) << 26);
	upper_addr |= (upper_8_bit(PLL_PLL_INT_GAIN_IFILT_BAND_1 + offset)
			<< 27);

	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL14,
			(PLL_PLL_FL_INT_GAIN_PFILT_BAND_1 + offset),
			(PLL_PLL_LOCK_OVERRIDE + offset),
			0x4c, 0x80);
	upper_addr |= (upper_8_bit(PLL_PLL_FL_INT_GAIN_PFILT_BAND_1 + offset)
			<< 28);
	upper_addr |= (upper_8_bit(PLL_PLL_LOCK_OVERRIDE + offset) << 29);

	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL15,
			(PLL_PFILT + offset),
			(PLL_IFILT + offset),
			0x2f, 0x3f);
	upper_addr |= (upper_8_bit(PLL_PFILT + offset) << 30);
	upper_addr |= (upper_8_bit(PLL_IFILT + offset) << 31);

	/* Trim codes cached by dsi_pll_read_stored_trim_codes() */
	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL16,
			(PLL_FREQ_TUNE_ACCUM_INIT_HIGH + offset),
			(PLL_FREQ_TUNE_ACCUM_INIT_MID + offset),
			rsc->cache_pll_trim_codes[0], rsc->cache_pll_trim_codes[1]);
	upper_addr2 |= (upper_8_bit(PLL_FREQ_TUNE_ACCUM_INIT_HIGH + offset) << 0);
	upper_addr2 |= (upper_8_bit(PLL_FREQ_TUNE_ACCUM_INIT_MID + offset) << 1);

	/*
	 * NOTE(review): CTRL17 writes the same register into both halves and
	 * re-ORs upper_addr2 bits 0/1 already used by CTRL16. That is only
	 * harmless if all four addresses share the same upper bit — looks
	 * like a copy/paste of the CTRL16 pattern; verify intended bits.
	 */
	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL17,
			(PLL_PLL_BAND_SEL_RATE_1 + offset),
			(PLL_PLL_BAND_SEL_RATE_1 + offset),
			rsc->cache_pll_trim_codes[2], rsc->cache_pll_trim_codes[2]);
	upper_addr2 |= (upper_8_bit(PLL_PLL_BAND_SEL_RATE_1 + offset) << 0);
	upper_addr2 |= (upper_8_bit(PLL_PLL_BAND_SEL_RATE_1 + offset) << 1);

	/*
	 * NOTE(review): the write targets PLL_SYSTEM_MUXES but the upper-addr
	 * bookkeeping for bit 2 uses PLL_BAND_SEL_CAL — mismatch between the
	 * programmed address and its recorded upper bit; confirm which is
	 * intended.
	 */
	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL18,
			(PLL_SYSTEM_MUXES + offset),
			(PLL_CALIBRATION_SETTINGS + offset),
			0xc0, 0x40);
	upper_addr2 |= (upper_8_bit(PLL_BAND_SEL_CAL + offset) << 2);
	upper_addr2 |= (upper_8_bit(PLL_CALIBRATION_SETTINGS + offset) << 3);

	/* CTRL19..CTRL26 are not programmed here */

	data = MDSS_PLL_REG_R(rsc->phy_base, PHY_CMN_CLK_CFG0);
	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL27,
			PHY_CMN_CTRL_2, PHY_CMN_CLK_CFG0, 0x40, data);
	if (rsc->slave)
		MDSS_DYN_PLL_REG_W(rsc->slave->dyn_pll_base,
				DSI_DYNAMIC_REFRESH_PLL_CTRL10,
				PHY_CMN_CLK_CFG0, PHY_CMN_CTRL_0,
				data, 0x7f);

	/* Kick the PLL twice via PLL_CNTRL */
	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL28,
			PHY_CMN_PLL_CNTRL, PHY_CMN_PLL_CNTRL, 0x01, 0x01);
	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL29,
			PHY_CMN_PLL_CNTRL, PHY_CMN_PLL_CNTRL, 0x01, 0x01);

	/* Restore CLK_CFG1 with BIT(5) set, and release RBUF */
	data = MDSS_PLL_REG_R(rsc->phy_base, PHY_CMN_CLK_CFG1) | BIT(5);
	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL30,
			PHY_CMN_CLK_CFG1, PHY_CMN_RBUF_CTRL, data, 0x01);
	MDSS_DYN_PLL_REG_W(rsc->dyn_pll_base, DSI_DYNAMIC_REFRESH_PLL_CTRL31,
			PHY_CMN_CLK_CFG1, PHY_CMN_CLK_CFG1, data, data);
	if (rsc->slave) {
		data = MDSS_PLL_REG_R(rsc->slave->phy_base, PHY_CMN_CLK_CFG1) |
			BIT(5);
		MDSS_DYN_PLL_REG_W(rsc->slave->dyn_pll_base,
				DSI_DYNAMIC_REFRESH_PLL_CTRL30,
				PHY_CMN_CLK_CFG1, PHY_CMN_RBUF_CTRL,
				data, 0x01);
		MDSS_DYN_PLL_REG_W(rsc->slave->dyn_pll_base,
				DSI_DYNAMIC_REFRESH_PLL_CTRL31,
				PHY_CMN_CLK_CFG1, PHY_CMN_CLK_CFG1,
				data, data);
	}

	MDSS_PLL_REG_W(rsc->dyn_pll_base,
		DSI_DYNAMIC_REFRESH_PLL_UPPER_ADDR, upper_addr);
	MDSS_PLL_REG_W(rsc->dyn_pll_base,
		DSI_DYNAMIC_REFRESH_PLL_UPPER_ADDR2, upper_addr2);
	wmb(); /* commit register writes */
}
/*
 * shadow_vco_7nm_set_rate() - stage a new VCO rate for dynamic refresh
 * @hw:		clk_hw of the shadow VCO clock
 * @rate:	target VCO rate in Hz
 * @parent_rate: unused; the reference rate is taken from the vco struct
 *
 * Unlike the primary set_rate, this does not reprogram the live PLL: it
 * looks up the cached trim codes for @rate, computes the divider settings,
 * and writes them into the dynamic-refresh shadow registers so hardware
 * can switch seamlessly on the next trigger.
 *
 * Return: 0 on success, -EINVAL if resources or trim codes are missing,
 * or the error from mdss_pll_resource_enable().
 */
static int shadow_vco_7nm_set_rate(struct clk_hw *hw, unsigned long rate,
			unsigned long parent_rate)
{
	int rc;
	struct dsi_pll_7nm *pll;
	struct dsi_pll_vco_clk *vco = to_vco_clk_hw(hw);
	struct mdss_pll_resources *rsc = vco->priv;

	if (!rsc) {
		pr_err("pll resource not found\n");
		return -EINVAL;
	}

	pll = rsc->priv;
	if (!pll) {
		pr_err("pll configuration not found\n");
		return -EINVAL;
	}

	rc = dsi_pll_read_stored_trim_codes(rsc, rate);
	if (rc) {
		/* %lu: rate is unsigned long (was mis-printed with %ld) */
		pr_err("cannot find pll codes rate=%lu\n", rate);
		return -EINVAL;
	}

	pr_debug("ndx=%d, rate=%lu\n", rsc->index, rate);

	rc = mdss_pll_resource_enable(rsc, true);
	if (rc) {
		pr_err("failed to enable mdss dsi pll(%d), rc=%d\n",
		       rsc->index, rc);
		return rc;
	}

	rsc->vco_current_rate = rate;
	rsc->vco_ref_clk_rate = vco->ref_clk_rate;

	dsi_pll_setup_config(pll, rsc);
	dsi_pll_calc_dec_frac(pll, rsc);

	/* program dynamic refresh control registers */
	shadow_dsi_pll_dynamic_refresh_7nm(pll, rsc);

	/* update cached vco rate */
	rsc->vco_cached_rate = rate;
	rsc->dfps_trigger = true;

	mdss_pll_resource_enable(rsc, false);

	return 0;
}
static int dsi_pll_7nm_lock_status(struct mdss_pll_resources *pll) static int dsi_pll_7nm_lock_status(struct mdss_pll_resources *pll)
{ {
int rc; int rc;
@@ -1002,7 +1325,7 @@ static int dsi_pll_enable(struct dsi_pll_vco_clk *vco)
phy_reg_update_bits_sub(rsc, PHY_CMN_CLK_CFG1, 0x03, rsc->cached_cfg1); phy_reg_update_bits_sub(rsc, PHY_CMN_CLK_CFG1, 0x03, rsc->cached_cfg1);
if (rsc->slave) if (rsc->slave)
phy_reg_update_bits_sub(rsc->slave, PHY_CMN_CLK_CFG1, phy_reg_update_bits_sub(rsc->slave, PHY_CMN_CLK_CFG1,
0x03, rsc->cached_cfg1); 0x03, rsc->slave->cached_cfg1);
wmb(); /* ensure dsiclk_sel is always programmed before pll start */ wmb(); /* ensure dsiclk_sel is always programmed before pll start */
/* Start PLL */ /* Start PLL */
@@ -1057,6 +1380,7 @@ static void dsi_pll_disable(struct dsi_pll_vco_clk *vco)
} }
rsc->handoff_resources = false; rsc->handoff_resources = false;
rsc->dfps_trigger = false;
pr_debug("stop PLL (%d)\n", rsc->index); pr_debug("stop PLL (%d)\n", rsc->index);
@@ -1108,8 +1432,14 @@ static void vco_7nm_unprepare(struct clk_hw *hw)
/* /*
* During unprepare in continuous splash use case we want driver * During unprepare in continuous splash use case we want driver
* to pick all dividers instead of retaining bootloader configurations. * to pick all dividers instead of retaining bootloader configurations.
* Also handle the usecases when dynamic refresh gets triggered while
* handoff_resources flag is still set. For video mode, this flag does
* not get cleared until first suspend. Whereas for command mode, it
* doesnt get cleared until first idle power collapse. We need to make
* sure that we save and restore the divider settings when dynamic FPS
* is triggered.
*/ */
if (!pll->handoff_resources) { if (!pll->handoff_resources || pll->dfps_trigger) {
pll->cached_cfg0 = MDSS_PLL_REG_R(pll->phy_base, pll->cached_cfg0 = MDSS_PLL_REG_R(pll->phy_base,
PHY_CMN_CLK_CFG0); PHY_CMN_CLK_CFG0);
pll->cached_outdiv = MDSS_PLL_REG_R(pll->pll_base, pll->cached_outdiv = MDSS_PLL_REG_R(pll->pll_base,
@@ -1117,7 +1447,7 @@ static void vco_7nm_unprepare(struct clk_hw *hw)
pr_debug("cfg0=%d,cfg1=%d, outdiv=%d\n", pll->cached_cfg0, pr_debug("cfg0=%d,cfg1=%d, outdiv=%d\n", pll->cached_cfg0,
pll->cached_cfg1, pll->cached_outdiv); pll->cached_cfg1, pll->cached_outdiv);
pll->vco_cached_rate = clk_hw_get_rate(hw); pll->vco_cached_rate = clk_get_rate(hw->clk);
} }
/* /*
@@ -1127,9 +1457,14 @@ static void vco_7nm_unprepare(struct clk_hw *hw)
* does not change.For such usecases, we need to ensure that the cached * does not change.For such usecases, we need to ensure that the cached
* value is programmed prior to PLL being locked * value is programmed prior to PLL being locked
*/ */
if (pll->handoff_resources) if (pll->handoff_resources) {
pll->cached_cfg1 = MDSS_PLL_REG_R(pll->phy_base, pll->cached_cfg1 = MDSS_PLL_REG_R(pll->phy_base,
PHY_CMN_CLK_CFG1); PHY_CMN_CLK_CFG1);
if (pll->slave)
pll->slave->cached_cfg1 =
MDSS_PLL_REG_R(pll->slave->phy_base,
PHY_CMN_CLK_CFG1);
}
dsi_pll_disable(vco); dsi_pll_disable(vco);
mdss_pll_resource_enable(pll, false); mdss_pll_resource_enable(pll, false);
@@ -1171,6 +1506,9 @@ static int vco_7nm_prepare(struct clk_hw *hw)
pll->cached_cfg1); pll->cached_cfg1);
MDSS_PLL_REG_W(pll->phy_base, PHY_CMN_CLK_CFG0, MDSS_PLL_REG_W(pll->phy_base, PHY_CMN_CLK_CFG0,
pll->cached_cfg0); pll->cached_cfg0);
if (pll->slave)
MDSS_PLL_REG_W(pll->slave->phy_base, PHY_CMN_CLK_CFG0,
pll->cached_cfg0);
MDSS_PLL_REG_W(pll->pll_base, PLL_PLL_OUTDIV_RATE, MDSS_PLL_REG_W(pll->pll_base, PLL_PLL_OUTDIV_RATE,
pll->cached_outdiv); pll->cached_outdiv);
} }
@@ -1253,6 +1591,14 @@ static void pixel_clk_set_div_sub(struct mdss_pll_resources *pll, int div)
reg_val &= ~0xF0; reg_val &= ~0xF0;
reg_val |= (div << 4); reg_val |= (div << 4);
MDSS_PLL_REG_W(pll->phy_base, PHY_CMN_CLK_CFG0, reg_val); MDSS_PLL_REG_W(pll->phy_base, PHY_CMN_CLK_CFG0, reg_val);
/*
* cache the current parent index for cases where parent
* is not changing but rate is changing. In that case
* clock framework won't call parent_set and hence dsiclk_sel
* bit won't be programmed. e.g. dfps update use case.
*/
pll->cached_cfg0 = reg_val;
} }
static int pixel_clk_set_div(void *context, unsigned int reg, unsigned int div) static int pixel_clk_set_div(void *context, unsigned int reg, unsigned int div)
@@ -1372,6 +1718,12 @@ static const struct clk_ops clk_ops_vco_7nm = {
.unprepare = vco_7nm_unprepare, .unprepare = vco_7nm_unprepare,
}; };
/*
 * clk_ops for the shadow VCO: .set_rate stages the configuration into the
 * dynamic-refresh registers (shadow_vco_7nm_set_rate). NOTE(review): no
 * .prepare/.unprepare here — PLL enable/disable is presumably handled by
 * the primary VCO's clk_ops; confirm.
 */
static const struct clk_ops clk_ops_shadow_vco_7nm = {
	.recalc_rate = vco_7nm_recalc_rate,
	.set_rate = shadow_vco_7nm_set_rate,
	.round_rate = vco_7nm_round_rate,
};
static struct regmap_bus mdss_mux_regmap_bus = { static struct regmap_bus mdss_mux_regmap_bus = {
.reg_write = mdss_set_mux_sel, .reg_write = mdss_set_mux_sel,
.reg_read = mdss_get_mux_sel, .reg_read = mdss_get_mux_sel,
@@ -1445,6 +1797,18 @@ static struct dsi_pll_vco_clk dsi0pll_vco_clk = {
}, },
}; };
/*
 * Shadow VCO clock for DSI0: 19.2 MHz reference, 1.0-3.5 GHz output range,
 * driven by the shadow (dynamic refresh) clk_ops.
 */
static struct dsi_pll_vco_clk dsi0pll_shadow_vco_clk = {
	.ref_clk_rate = 19200000UL,
	.min_rate = 1000000000UL,
	.max_rate = 3500000000UL,
	.hw.init = &(struct clk_init_data){
		.name = "dsi0pll_shadow_vco_clk",
		.parent_names = (const char *[]){"bi_tcxo"},
		.num_parents = 1,
		.ops = &clk_ops_shadow_vco_7nm,
	},
};
static struct dsi_pll_vco_clk dsi1pll_vco_clk = { static struct dsi_pll_vco_clk dsi1pll_vco_clk = {
.ref_clk_rate = 19200000UL, .ref_clk_rate = 19200000UL,
.min_rate = 1000000000UL, .min_rate = 1000000000UL,
@@ -1457,6 +1821,18 @@ static struct dsi_pll_vco_clk dsi1pll_vco_clk = {
}, },
}; };
/*
 * Shadow VCO clock for DSI1: 19.2 MHz reference, 1.0-3.5 GHz output range,
 * driven by the shadow (dynamic refresh) clk_ops.
 */
static struct dsi_pll_vco_clk dsi1pll_shadow_vco_clk = {
	.ref_clk_rate = 19200000UL,
	.min_rate = 1000000000UL,
	.max_rate = 3500000000UL,
	.hw.init = &(struct clk_init_data){
		.name = "dsi1pll_shadow_vco_clk",
		.parent_names = (const char *[]){"bi_tcxo"},
		.num_parents = 1,
		.ops = &clk_ops_shadow_vco_7nm,
	},
};
static struct clk_regmap_div dsi0pll_pll_out_div = { static struct clk_regmap_div dsi0pll_pll_out_div = {
.reg = PLL_PLL_OUTDIV_RATE, .reg = PLL_PLL_OUTDIV_RATE,
.shift = 0, .shift = 0,
@@ -1473,6 +1849,23 @@ static struct clk_regmap_div dsi0pll_pll_out_div = {
}, },
}; };
/*
 * Shadow PLL output divider for DSI0: 2-bit power-of-two divider
 * (divide by 1/2/4/8) on PLL_PLL_OUTDIV_RATE.
 */
static struct clk_regmap_div dsi0pll_shadow_pll_out_div = {
	.reg = PLL_PLL_OUTDIV_RATE,
	.shift = 0,
	.width = 2,
	.flags = CLK_DIVIDER_POWER_OF_TWO,
	.clkr = {
		.hw.init = &(struct clk_init_data){
			.name = "dsi0pll_shadow_pll_out_div",
			.parent_names = (const char *[]){
				"dsi0pll_shadow_vco_clk"},
			.num_parents = 1,
			.flags = CLK_SET_RATE_PARENT,
			.ops = &clk_regmap_div_ops,
		},
	},
};
static struct clk_regmap_div dsi1pll_pll_out_div = { static struct clk_regmap_div dsi1pll_pll_out_div = {
.reg = PLL_PLL_OUTDIV_RATE, .reg = PLL_PLL_OUTDIV_RATE,
.shift = 0, .shift = 0,
@@ -1489,6 +1882,23 @@ static struct clk_regmap_div dsi1pll_pll_out_div = {
}, },
}; };
/*
 * Shadow PLL output divider for DSI1: 2-bit power-of-two divider
 * (divide by 1/2/4/8) on PLL_PLL_OUTDIV_RATE.
 */
static struct clk_regmap_div dsi1pll_shadow_pll_out_div = {
	.reg = PLL_PLL_OUTDIV_RATE,
	.shift = 0,
	.width = 2,
	.flags = CLK_DIVIDER_POWER_OF_TWO,
	.clkr = {
		.hw.init = &(struct clk_init_data){
			.name = "dsi1pll_shadow_pll_out_div",
			.parent_names = (const char *[]){
				"dsi1pll_shadow_vco_clk"},
			.num_parents = 1,
			.flags = CLK_SET_RATE_PARENT,
			.ops = &clk_regmap_div_ops,
		},
	},
};
static struct clk_regmap_div dsi0pll_bitclk_src = { static struct clk_regmap_div dsi0pll_bitclk_src = {
.shift = 0, .shift = 0,
.width = 4, .width = 4,
@@ -1504,6 +1914,22 @@ static struct clk_regmap_div dsi0pll_bitclk_src = {
}, },
}; };
/*
 * Shadow bit-clock divider for DSI0: 4-bit one-based divider (register
 * value N divides by N; 0 allowed). No .reg — routed via the bitclk_src
 * regmap bus attached at registration.
 */
static struct clk_regmap_div dsi0pll_shadow_bitclk_src = {
	.shift = 0,
	.width = 4,
	.flags = CLK_DIVIDER_ONE_BASED | CLK_DIVIDER_ALLOW_ZERO,
	.clkr = {
		.hw.init = &(struct clk_init_data){
			.name = "dsi0pll_shadow_bitclk_src",
			.parent_names = (const char *[]){
				"dsi0pll_shadow_pll_out_div"},
			.num_parents = 1,
			.flags = CLK_SET_RATE_PARENT,
			.ops = &clk_regmap_div_ops,
		},
	},
};
static struct clk_regmap_div dsi1pll_bitclk_src = { static struct clk_regmap_div dsi1pll_bitclk_src = {
.shift = 0, .shift = 0,
.width = 4, .width = 4,
@@ -1519,6 +1945,22 @@ static struct clk_regmap_div dsi1pll_bitclk_src = {
}, },
}; };
/*
 * Shadow bit-clock divider for DSI1: 4-bit one-based divider (register
 * value N divides by N; 0 allowed). No .reg — routed via the bitclk_src
 * regmap bus attached at registration.
 */
static struct clk_regmap_div dsi1pll_shadow_bitclk_src = {
	.shift = 0,
	.width = 4,
	.flags = CLK_DIVIDER_ONE_BASED | CLK_DIVIDER_ALLOW_ZERO,
	.clkr = {
		.hw.init = &(struct clk_init_data){
			.name = "dsi1pll_shadow_bitclk_src",
			.parent_names = (const char *[]){
				"dsi1pll_shadow_pll_out_div"},
			.num_parents = 1,
			.flags = CLK_SET_RATE_PARENT,
			.ops = &clk_regmap_div_ops,
		},
	},
};
static struct clk_fixed_factor dsi0pll_post_vco_div = { static struct clk_fixed_factor dsi0pll_post_vco_div = {
.div = 4, .div = 4,
.mult = 1, .mult = 1,
@@ -1526,7 +1968,17 @@ static struct clk_fixed_factor dsi0pll_post_vco_div = {
.name = "dsi0pll_post_vco_div", .name = "dsi0pll_post_vco_div",
.parent_names = (const char *[]){"dsi0pll_pll_out_div"}, .parent_names = (const char *[]){"dsi0pll_pll_out_div"},
.num_parents = 1, .num_parents = 1,
.flags = CLK_SET_RATE_PARENT, .ops = &clk_fixed_factor_ops,
},
};
static struct clk_fixed_factor dsi0pll_shadow_post_vco_div = {
.div = 4,
.mult = 1,
.hw.init = &(struct clk_init_data){
.name = "dsi0pll_shadow_post_vco_div",
.parent_names = (const char *[]){"dsi0pll_shadow_pll_out_div"},
.num_parents = 1,
.ops = &clk_fixed_factor_ops, .ops = &clk_fixed_factor_ops,
}, },
}; };
@@ -1538,7 +1990,17 @@ static struct clk_fixed_factor dsi1pll_post_vco_div = {
.name = "dsi1pll_post_vco_div", .name = "dsi1pll_post_vco_div",
.parent_names = (const char *[]){"dsi1pll_pll_out_div"}, .parent_names = (const char *[]){"dsi1pll_pll_out_div"},
.num_parents = 1, .num_parents = 1,
.flags = CLK_SET_RATE_PARENT, .ops = &clk_fixed_factor_ops,
},
};
static struct clk_fixed_factor dsi1pll_shadow_post_vco_div = {
.div = 4,
.mult = 1,
.hw.init = &(struct clk_init_data){
.name = "dsi1pll_shadow_post_vco_div",
.parent_names = (const char *[]){"dsi1pll_shadow_pll_out_div"},
.num_parents = 1,
.ops = &clk_fixed_factor_ops, .ops = &clk_fixed_factor_ops,
}, },
}; };
@@ -1555,6 +2017,18 @@ static struct clk_fixed_factor dsi0pll_byteclk_src = {
}, },
}; };
static struct clk_fixed_factor dsi0pll_shadow_byteclk_src = {
.div = 8,
.mult = 1,
.hw.init = &(struct clk_init_data){
.name = "dsi0pll_shadow_byteclk_src",
.parent_names = (const char *[]){"dsi0pll_shadow_bitclk_src"},
.num_parents = 1,
.flags = CLK_SET_RATE_PARENT,
.ops = &clk_fixed_factor_ops,
},
};
static struct clk_fixed_factor dsi1pll_byteclk_src = { static struct clk_fixed_factor dsi1pll_byteclk_src = {
.div = 8, .div = 8,
.mult = 1, .mult = 1,
@@ -1567,6 +2041,18 @@ static struct clk_fixed_factor dsi1pll_byteclk_src = {
}, },
}; };
static struct clk_fixed_factor dsi1pll_shadow_byteclk_src = {
.div = 8,
.mult = 1,
.hw.init = &(struct clk_init_data){
.name = "dsi1pll_shadow_byteclk_src",
.parent_names = (const char *[]){"dsi1pll_shadow_bitclk_src"},
.num_parents = 1,
.flags = CLK_SET_RATE_PARENT,
.ops = &clk_fixed_factor_ops,
},
};
static struct clk_fixed_factor dsi0pll_post_bit_div = { static struct clk_fixed_factor dsi0pll_post_bit_div = {
.div = 2, .div = 2,
.mult = 1, .mult = 1,
@@ -1578,6 +2064,17 @@ static struct clk_fixed_factor dsi0pll_post_bit_div = {
}, },
}; };
static struct clk_fixed_factor dsi0pll_shadow_post_bit_div = {
.div = 2,
.mult = 1,
.hw.init = &(struct clk_init_data){
.name = "dsi0pll_shadow_post_bit_div",
.parent_names = (const char *[]){"dsi0pll_shadow_bitclk_src"},
.num_parents = 1,
.ops = &clk_fixed_factor_ops,
},
};
static struct clk_fixed_factor dsi1pll_post_bit_div = { static struct clk_fixed_factor dsi1pll_post_bit_div = {
.div = 2, .div = 2,
.mult = 1, .mult = 1,
@@ -1589,15 +2086,28 @@ static struct clk_fixed_factor dsi1pll_post_bit_div = {
}, },
}; };
static struct clk_fixed_factor dsi1pll_shadow_post_bit_div = {
.div = 2,
.mult = 1,
.hw.init = &(struct clk_init_data){
.name = "dsi1pll_shadow_post_bit_div",
.parent_names = (const char *[]){"dsi1pll_shadow_bitclk_src"},
.num_parents = 1,
.ops = &clk_fixed_factor_ops,
},
};
static struct clk_regmap_mux dsi0pll_byteclk_mux = { static struct clk_regmap_mux dsi0pll_byteclk_mux = {
.shift = 0, .shift = 0,
.width = 1, .width = 1,
.clkr = { .clkr = {
.hw.init = &(struct clk_init_data){ .hw.init = &(struct clk_init_data){
.name = "dsi0_phy_pll_out_byteclk", .name = "dsi0_phy_pll_out_byteclk",
.parent_names = (const char *[]){"dsi0pll_byteclk_src"}, .parent_names = (const char *[]){"dsi0pll_byteclk_src",
.num_parents = 1, "dsi0pll_shadow_byteclk_src"},
.flags = CLK_SET_RATE_PARENT, .num_parents = 2,
.flags = (CLK_SET_RATE_PARENT |
CLK_SET_RATE_NO_REPARENT),
.ops = &clk_regmap_mux_closest_ops, .ops = &clk_regmap_mux_closest_ops,
}, },
}, },
@@ -1609,9 +2119,11 @@ static struct clk_regmap_mux dsi1pll_byteclk_mux = {
.clkr = { .clkr = {
.hw.init = &(struct clk_init_data){ .hw.init = &(struct clk_init_data){
.name = "dsi1_phy_pll_out_byteclk", .name = "dsi1_phy_pll_out_byteclk",
.parent_names = (const char *[]){"dsi1pll_byteclk_src"}, .parent_names = (const char *[]){"dsi1pll_byteclk_src",
.num_parents = 1, "dsi1pll_shadow_byteclk_src"},
.flags = CLK_SET_RATE_PARENT, .num_parents = 2,
.flags = (CLK_SET_RATE_PARENT |
CLK_SET_RATE_NO_REPARENT),
.ops = &clk_regmap_mux_closest_ops, .ops = &clk_regmap_mux_closest_ops,
}, },
}, },
@@ -1634,6 +2146,24 @@ static struct clk_regmap_mux dsi0pll_pclk_src_mux = {
}, },
}; };
static struct clk_regmap_mux dsi0pll_shadow_pclk_src_mux = {
.reg = PHY_CMN_CLK_CFG1,
.shift = 0,
.width = 2,
.clkr = {
.hw.init = &(struct clk_init_data){
.name = "dsi0pll_shadow_pclk_src_mux",
.parent_names = (const char *[]){
"dsi0pll_shadow_bitclk_src",
"dsi0pll_shadow_post_bit_div",
"dsi0pll_shadow_pll_out_div",
"dsi0pll_shadow_post_vco_div"},
.num_parents = 4,
.ops = &clk_regmap_mux_closest_ops,
},
},
};
static struct clk_regmap_mux dsi1pll_pclk_src_mux = { static struct clk_regmap_mux dsi1pll_pclk_src_mux = {
.reg = PHY_CMN_CLK_CFG1, .reg = PHY_CMN_CLK_CFG1,
.shift = 0, .shift = 0,
@@ -1651,6 +2181,24 @@ static struct clk_regmap_mux dsi1pll_pclk_src_mux = {
}, },
}; };
static struct clk_regmap_mux dsi1pll_shadow_pclk_src_mux = {
.reg = PHY_CMN_CLK_CFG1,
.shift = 0,
.width = 2,
.clkr = {
.hw.init = &(struct clk_init_data){
.name = "dsi1pll_shadow_pclk_src_mux",
.parent_names = (const char *[]){
"dsi1pll_shadow_bitclk_src",
"dsi1pll_shadow_post_bit_div",
"dsi1pll_shadow_pll_out_div",
"dsi1pll_shadow_post_vco_div"},
.num_parents = 4,
.ops = &clk_regmap_mux_closest_ops,
},
},
};
static struct clk_regmap_div dsi0pll_pclk_src = { static struct clk_regmap_div dsi0pll_pclk_src = {
.shift = 0, .shift = 0,
.width = 4, .width = 4,
@@ -1667,6 +2215,22 @@ static struct clk_regmap_div dsi0pll_pclk_src = {
}, },
}; };
static struct clk_regmap_div dsi0pll_shadow_pclk_src = {
.shift = 0,
.width = 4,
.flags = CLK_DIVIDER_ONE_BASED | CLK_DIVIDER_ALLOW_ZERO,
.clkr = {
.hw.init = &(struct clk_init_data){
.name = "dsi0pll_shadow_pclk_src",
.parent_names = (const char *[]){
"dsi0pll_shadow_pclk_src_mux"},
.num_parents = 1,
.flags = CLK_SET_RATE_PARENT,
.ops = &clk_regmap_div_ops,
},
},
};
static struct clk_regmap_div dsi1pll_pclk_src = { static struct clk_regmap_div dsi1pll_pclk_src = {
.shift = 0, .shift = 0,
.width = 4, .width = 4,
@@ -1683,15 +2247,33 @@ static struct clk_regmap_div dsi1pll_pclk_src = {
}, },
}; };
static struct clk_regmap_div dsi1pll_shadow_pclk_src = {
.shift = 0,
.width = 4,
.flags = CLK_DIVIDER_ONE_BASED | CLK_DIVIDER_ALLOW_ZERO,
.clkr = {
.hw.init = &(struct clk_init_data){
.name = "dsi1pll_shadow_pclk_src",
.parent_names = (const char *[]){
"dsi1pll_shadow_pclk_src_mux"},
.num_parents = 1,
.flags = CLK_SET_RATE_PARENT,
.ops = &clk_regmap_div_ops,
},
},
};
static struct clk_regmap_mux dsi0pll_pclk_mux = { static struct clk_regmap_mux dsi0pll_pclk_mux = {
.shift = 0, .shift = 0,
.width = 1, .width = 1,
.clkr = { .clkr = {
.hw.init = &(struct clk_init_data){ .hw.init = &(struct clk_init_data){
.name = "dsi0_phy_pll_out_dsiclk", .name = "dsi0_phy_pll_out_dsiclk",
.parent_names = (const char *[]){"dsi0pll_pclk_src"}, .parent_names = (const char *[]){"dsi0pll_pclk_src",
.num_parents = 1, "dsi0pll_shadow_pclk_src"},
.flags = CLK_SET_RATE_PARENT, .num_parents = 2,
.flags = (CLK_SET_RATE_PARENT |
CLK_SET_RATE_NO_REPARENT),
.ops = &clk_regmap_mux_closest_ops, .ops = &clk_regmap_mux_closest_ops,
}, },
}, },
@@ -1703,9 +2285,11 @@ static struct clk_regmap_mux dsi1pll_pclk_mux = {
.clkr = { .clkr = {
.hw.init = &(struct clk_init_data){ .hw.init = &(struct clk_init_data){
.name = "dsi1_phy_pll_out_dsiclk", .name = "dsi1_phy_pll_out_dsiclk",
.parent_names = (const char *[]){"dsi1pll_pclk_src"}, .parent_names = (const char *[]){"dsi1pll_pclk_src",
.num_parents = 1, "dsi1pll_shadow_pclk_src"},
.flags = CLK_SET_RATE_PARENT, .num_parents = 2,
.flags = (CLK_SET_RATE_PARENT |
CLK_SET_RATE_NO_REPARENT),
.ops = &clk_regmap_mux_closest_ops, .ops = &clk_regmap_mux_closest_ops,
}, },
}, },
@@ -1722,6 +2306,14 @@ static struct clk_hw *mdss_dsi_pllcc_7nm[] = {
[PCLK_SRC_MUX_0_CLK] = &dsi0pll_pclk_src_mux.clkr.hw, [PCLK_SRC_MUX_0_CLK] = &dsi0pll_pclk_src_mux.clkr.hw,
[PCLK_SRC_0_CLK] = &dsi0pll_pclk_src.clkr.hw, [PCLK_SRC_0_CLK] = &dsi0pll_pclk_src.clkr.hw,
[PCLK_MUX_0_CLK] = &dsi0pll_pclk_mux.clkr.hw, [PCLK_MUX_0_CLK] = &dsi0pll_pclk_mux.clkr.hw,
[SHADOW_VCO_CLK_0] = &dsi0pll_shadow_vco_clk.hw,
[SHADOW_PLL_OUT_DIV_0_CLK] = &dsi0pll_shadow_pll_out_div.clkr.hw,
[SHADOW_BITCLK_SRC_0_CLK] = &dsi0pll_shadow_bitclk_src.clkr.hw,
[SHADOW_BYTECLK_SRC_0_CLK] = &dsi0pll_shadow_byteclk_src.hw,
[SHADOW_POST_BIT_DIV_0_CLK] = &dsi0pll_shadow_post_bit_div.hw,
[SHADOW_POST_VCO_DIV_0_CLK] = &dsi0pll_shadow_post_vco_div.hw,
[SHADOW_PCLK_SRC_MUX_0_CLK] = &dsi0pll_shadow_pclk_src_mux.clkr.hw,
[SHADOW_PCLK_SRC_0_CLK] = &dsi0pll_shadow_pclk_src.clkr.hw,
[VCO_CLK_1] = &dsi1pll_vco_clk.hw, [VCO_CLK_1] = &dsi1pll_vco_clk.hw,
[PLL_OUT_DIV_1_CLK] = &dsi1pll_pll_out_div.clkr.hw, [PLL_OUT_DIV_1_CLK] = &dsi1pll_pll_out_div.clkr.hw,
[BITCLK_SRC_1_CLK] = &dsi1pll_bitclk_src.clkr.hw, [BITCLK_SRC_1_CLK] = &dsi1pll_bitclk_src.clkr.hw,
@@ -1732,6 +2324,14 @@ static struct clk_hw *mdss_dsi_pllcc_7nm[] = {
[PCLK_SRC_MUX_1_CLK] = &dsi1pll_pclk_src_mux.clkr.hw, [PCLK_SRC_MUX_1_CLK] = &dsi1pll_pclk_src_mux.clkr.hw,
[PCLK_SRC_1_CLK] = &dsi1pll_pclk_src.clkr.hw, [PCLK_SRC_1_CLK] = &dsi1pll_pclk_src.clkr.hw,
[PCLK_MUX_1_CLK] = &dsi1pll_pclk_mux.clkr.hw, [PCLK_MUX_1_CLK] = &dsi1pll_pclk_mux.clkr.hw,
[SHADOW_VCO_CLK_1] = &dsi1pll_shadow_vco_clk.hw,
[SHADOW_PLL_OUT_DIV_1_CLK] = &dsi1pll_shadow_pll_out_div.clkr.hw,
[SHADOW_BITCLK_SRC_1_CLK] = &dsi1pll_shadow_bitclk_src.clkr.hw,
[SHADOW_BYTECLK_SRC_1_CLK] = &dsi1pll_shadow_byteclk_src.hw,
[SHADOW_POST_BIT_DIV_1_CLK] = &dsi1pll_shadow_post_bit_div.hw,
[SHADOW_POST_VCO_DIV_1_CLK] = &dsi1pll_shadow_post_vco_div.hw,
[SHADOW_PCLK_SRC_MUX_1_CLK] = &dsi1pll_shadow_pclk_src_mux.clkr.hw,
[SHADOW_PCLK_SRC_1_CLK] = &dsi1pll_shadow_pclk_src.clkr.hw,
}; };
int dsi_pll_clock_register_7nm(struct platform_device *pdev, int dsi_pll_clock_register_7nm(struct platform_device *pdev,
@@ -1743,6 +2343,12 @@ int dsi_pll_clock_register_7nm(struct platform_device *pdev,
int num_clks = ARRAY_SIZE(mdss_dsi_pllcc_7nm); int num_clks = ARRAY_SIZE(mdss_dsi_pllcc_7nm);
struct regmap *rmap; struct regmap *rmap;
if (!pdev || !pdev->dev.of_node ||
!pll_res || !pll_res->pll_base || !pll_res->phy_base) {
pr_err("Invalid params\n");
return -EINVAL;
}
ndx = pll_res->index; ndx = pll_res->index;
if (ndx >= DSI_PLL_MAX) { if (ndx >= DSI_PLL_MAX) {
@@ -1755,12 +2361,13 @@ int dsi_pll_clock_register_7nm(struct platform_device *pdev,
pll_res->priv = &plls[ndx]; pll_res->priv = &plls[ndx];
pll_res->vco_delay = VCO_DELAY_USEC; pll_res->vco_delay = VCO_DELAY_USEC;
clk_data = devm_kzalloc(&pdev->dev, sizeof(*clk_data), GFP_KERNEL); clk_data = devm_kzalloc(&pdev->dev, sizeof(struct clk_onecell_data),
GFP_KERNEL);
if (!clk_data) if (!clk_data)
return -ENOMEM; return -ENOMEM;
clk_data->clks = devm_kcalloc(&pdev->dev, num_clks, clk_data->clks = devm_kzalloc(&pdev->dev, (num_clks *
sizeof(struct clk *), GFP_KERNEL); sizeof(struct clk *)), GFP_KERNEL);
if (!clk_data->clks) if (!clk_data->clks)
return -ENOMEM; return -ENOMEM;
@@ -1768,18 +2375,20 @@ int dsi_pll_clock_register_7nm(struct platform_device *pdev,
/* Establish client data */ /* Establish client data */
if (ndx == 0) { if (ndx == 0) {
rmap = devm_regmap_init(&pdev->dev, &pll_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &pll_regmap_bus,
pll_res, &dsi_pll_7nm_config); pll_res, &dsi_pll_7nm_config);
dsi0pll_pll_out_div.clkr.regmap = rmap; dsi0pll_pll_out_div.clkr.regmap = rmap;
dsi0pll_shadow_pll_out_div.clkr.regmap = rmap;
rmap = devm_regmap_init(&pdev->dev, &bitclk_src_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &bitclk_src_regmap_bus,
pll_res, &dsi_pll_7nm_config); pll_res, &dsi_pll_7nm_config);
dsi0pll_bitclk_src.clkr.regmap = rmap; dsi0pll_bitclk_src.clkr.regmap = rmap;
dsi0pll_shadow_bitclk_src.clkr.regmap = rmap;
rmap = devm_regmap_init(&pdev->dev, &pclk_src_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &pclk_src_regmap_bus,
pll_res, &dsi_pll_7nm_config); pll_res, &dsi_pll_7nm_config);
dsi0pll_pclk_src.clkr.regmap = rmap; dsi0pll_pclk_src.clkr.regmap = rmap;
dsi0pll_shadow_pclk_src.clkr.regmap = rmap;
rmap = devm_regmap_init(&pdev->dev, &mdss_mux_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &mdss_mux_regmap_bus,
pll_res, &dsi_pll_7nm_config); pll_res, &dsi_pll_7nm_config);
@@ -1788,18 +2397,21 @@ int dsi_pll_clock_register_7nm(struct platform_device *pdev,
rmap = devm_regmap_init(&pdev->dev, &pclk_src_mux_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &pclk_src_mux_regmap_bus,
pll_res, &dsi_pll_7nm_config); pll_res, &dsi_pll_7nm_config);
dsi0pll_pclk_src_mux.clkr.regmap = rmap; dsi0pll_pclk_src_mux.clkr.regmap = rmap;
dsi0pll_shadow_pclk_src_mux.clkr.regmap = rmap;
rmap = devm_regmap_init(&pdev->dev, &mdss_mux_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &mdss_mux_regmap_bus,
pll_res, &dsi_pll_7nm_config); pll_res, &dsi_pll_7nm_config);
dsi0pll_byteclk_mux.clkr.regmap = rmap; dsi0pll_byteclk_mux.clkr.regmap = rmap;
dsi0pll_vco_clk.priv = pll_res; dsi0pll_vco_clk.priv = pll_res;
dsi0pll_shadow_vco_clk.priv = pll_res;
if (dsi_pll_7nm_is_hw_revision_v4_1(pll_res)) { if (dsi_pll_7nm_is_hw_revision_v4_1(pll_res)) {
dsi0pll_vco_clk.min_rate = 600000000; dsi0pll_vco_clk.min_rate = 600000000;
dsi0pll_vco_clk.max_rate = 5000000000; dsi0pll_vco_clk.max_rate = 5000000000;
} }
for (i = VCO_CLK_0; i <= PCLK_MUX_0_CLK; i++) { for (i = VCO_CLK_0; i <= SHADOW_PCLK_SRC_0_CLK; i++) {
clk = devm_clk_register(&pdev->dev, clk = devm_clk_register(&pdev->dev,
mdss_dsi_pllcc_7nm[i]); mdss_dsi_pllcc_7nm[i]);
if (IS_ERR(clk)) { if (IS_ERR(clk)) {
@@ -1814,20 +2426,21 @@ int dsi_pll_clock_register_7nm(struct platform_device *pdev,
rc = of_clk_add_provider(pdev->dev.of_node, rc = of_clk_add_provider(pdev->dev.of_node,
of_clk_src_onecell_get, clk_data); of_clk_src_onecell_get, clk_data);
} else { } else {
rmap = devm_regmap_init(&pdev->dev, &pll_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &pll_regmap_bus,
pll_res, &dsi_pll_7nm_config); pll_res, &dsi_pll_7nm_config);
dsi1pll_pll_out_div.clkr.regmap = rmap; dsi1pll_pll_out_div.clkr.regmap = rmap;
dsi1pll_shadow_pll_out_div.clkr.regmap = rmap;
rmap = devm_regmap_init(&pdev->dev, &bitclk_src_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &bitclk_src_regmap_bus,
pll_res, &dsi_pll_7nm_config); pll_res, &dsi_pll_7nm_config);
dsi1pll_bitclk_src.clkr.regmap = rmap; dsi1pll_bitclk_src.clkr.regmap = rmap;
dsi1pll_shadow_bitclk_src.clkr.regmap = rmap;
rmap = devm_regmap_init(&pdev->dev, &pclk_src_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &pclk_src_regmap_bus,
pll_res, &dsi_pll_7nm_config); pll_res, &dsi_pll_7nm_config);
dsi1pll_pclk_src.clkr.regmap = rmap; dsi1pll_pclk_src.clkr.regmap = rmap;
dsi1pll_shadow_pclk_src.clkr.regmap = rmap;
rmap = devm_regmap_init(&pdev->dev, &mdss_mux_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &mdss_mux_regmap_bus,
pll_res, &dsi_pll_7nm_config); pll_res, &dsi_pll_7nm_config);
@@ -1836,17 +2449,21 @@ int dsi_pll_clock_register_7nm(struct platform_device *pdev,
rmap = devm_regmap_init(&pdev->dev, &pclk_src_mux_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &pclk_src_mux_regmap_bus,
pll_res, &dsi_pll_7nm_config); pll_res, &dsi_pll_7nm_config);
dsi1pll_pclk_src_mux.clkr.regmap = rmap; dsi1pll_pclk_src_mux.clkr.regmap = rmap;
dsi1pll_shadow_pclk_src_mux.clkr.regmap = rmap;
rmap = devm_regmap_init(&pdev->dev, &mdss_mux_regmap_bus, rmap = devm_regmap_init(&pdev->dev, &mdss_mux_regmap_bus,
pll_res, &dsi_pll_7nm_config); pll_res, &dsi_pll_7nm_config);
dsi1pll_byteclk_mux.clkr.regmap = rmap; dsi1pll_byteclk_mux.clkr.regmap = rmap;
dsi1pll_vco_clk.priv = pll_res; dsi1pll_vco_clk.priv = pll_res;
dsi1pll_shadow_vco_clk.priv = pll_res;
if (dsi_pll_7nm_is_hw_revision_v4_1(pll_res)) { if (dsi_pll_7nm_is_hw_revision_v4_1(pll_res)) {
dsi1pll_vco_clk.min_rate = 600000000; dsi1pll_vco_clk.min_rate = 600000000;
dsi1pll_vco_clk.max_rate = 5000000000; dsi1pll_vco_clk.max_rate = 5000000000;
} }
for (i = VCO_CLK_1; i <= PCLK_MUX_1_CLK; i++) { for (i = VCO_CLK_1; i <= SHADOW_PCLK_SRC_1_CLK; i++) {
clk = devm_clk_register(&pdev->dev, clk = devm_clk_register(&pdev->dev,
mdss_dsi_pllcc_7nm[i]); mdss_dsi_pllcc_7nm[i]);
if (IS_ERR(clk)) { if (IS_ERR(clk)) {

View File

@@ -33,6 +33,8 @@
writel_relaxed(PLL_CALC_DATA(addr0, addr1, data0, data1), \ writel_relaxed(PLL_CALC_DATA(addr0, addr1, data0, data1), \
(base) + (offset)) (base) + (offset))
#define upper_8_bit(x) ((((x) >> 2) & 0x100) >> 8)
enum { enum {
MDSS_DSI_PLL_10NM, MDSS_DSI_PLL_10NM,
MDSS_DP_PLL_10NM, MDSS_DP_PLL_10NM,
@@ -52,30 +54,23 @@ enum {
MDSS_PLL_TARGET_8996, MDSS_PLL_TARGET_8996,
}; };
#define DFPS_MAX_NUM_OF_FRAME_RATES 20 #define DFPS_MAX_NUM_OF_FRAME_RATES 16
struct dfps_panel_info {
uint32_t enabled;
uint32_t frame_rate_cnt;
uint32_t frame_rate[DFPS_MAX_NUM_OF_FRAME_RATES]; /* hz */
};
struct dfps_pll_codes { struct dfps_pll_codes {
uint32_t pll_codes_1; uint32_t pll_codes_1;
uint32_t pll_codes_2; uint32_t pll_codes_2;
uint32_t pll_codes_3;
}; };
struct dfps_codes_info { struct dfps_codes_info {
uint32_t is_valid; uint32_t is_valid;
uint32_t frame_rate; /* hz */
uint32_t clk_rate; /* hz */ uint32_t clk_rate; /* hz */
struct dfps_pll_codes pll_codes; struct dfps_pll_codes pll_codes;
}; };
struct dfps_info { struct dfps_info {
struct dfps_panel_info panel_dfps; uint32_t vco_rate_cnt;
struct dfps_codes_info codes_dfps[DFPS_MAX_NUM_OF_FRAME_RATES]; struct dfps_codes_info codes_dfps[DFPS_MAX_NUM_OF_FRAME_RATES];
void *dfps_fb_base;
}; };
struct mdss_pll_resources { struct mdss_pll_resources {
@@ -154,7 +149,7 @@ struct mdss_pll_resources {
/* /*
* caching the pll trim codes in the case of dynamic refresh * caching the pll trim codes in the case of dynamic refresh
*/ */
int cache_pll_trim_codes[2]; int cache_pll_trim_codes[3];
/* /*
* for maintaining the status of saving trim codes * for maintaining the status of saving trim codes
@@ -196,6 +191,11 @@ struct mdss_pll_resources {
*/ */
struct dfps_info *dfps; struct dfps_info *dfps;
/*
* for cases where dfps trigger happens before first
* suspend/resume and handoff is not finished.
*/
bool dfps_trigger;
}; };
struct mdss_pll_vco_calc { struct mdss_pll_vco_calc {