@@ -1361,9 +1361,7 @@ static void dsi_kickoff_msg_tx(struct dsi_ctrl *dsi_ctrl,
 
 	if (!(flags & DSI_CTRL_CMD_DEFER_TRIGGER)) {
 		dsi_ctrl_wait_for_video_done(dsi_ctrl);
-		if (dsi_hw_ops.mask_error_intr)
-			dsi_hw_ops.mask_error_intr(&dsi_ctrl->hw,
-					BIT(DSI_FIFO_OVERFLOW), true);
+		dsi_ctrl_mask_overflow(dsi_ctrl, true);
 
 		atomic_set(&dsi_ctrl->dma_irq_trig, 0);
 		dsi_ctrl_enable_status_interrupt(dsi_ctrl,
@@ -1407,9 +1405,8 @@ static void dsi_kickoff_msg_tx(struct dsi_ctrl *dsi_ctrl,
 		dsi_ctrl_dma_cmd_wait_for_done(&dsi_ctrl->dma_cmd_wait);
 	}
 
-	if (dsi_hw_ops.mask_error_intr && !dsi_ctrl->esd_check_underway)
-		dsi_hw_ops.mask_error_intr(&dsi_ctrl->hw,
-					BIT(DSI_FIFO_OVERFLOW), false);
+	dsi_ctrl_mask_overflow(dsi_ctrl, false);
+
 	dsi_hw_ops.reset_cmd_fifo(&dsi_ctrl->hw);
 
 	/*
@@ -3316,6 +3313,28 @@ error:
 	return rc;
 }
 
+/**
+ * dsi_ctrl_mask_overflow() - API to mask/unmask overflow error.
+ * @dsi_ctrl: DSI controller handle.
+ * @enable: variable to control masking/unmasking.
+ */
+void dsi_ctrl_mask_overflow(struct dsi_ctrl *dsi_ctrl, bool enable)
+{
+	struct dsi_ctrl_hw_ops dsi_hw_ops;
+
+	dsi_hw_ops = dsi_ctrl->hw.ops;
+
+	if (enable) {
+		if (dsi_hw_ops.mask_error_intr)
+			dsi_hw_ops.mask_error_intr(&dsi_ctrl->hw,
+					BIT(DSI_FIFO_OVERFLOW), true);
+	} else {
+		if (dsi_hw_ops.mask_error_intr && !dsi_ctrl->esd_check_underway)
+			dsi_hw_ops.mask_error_intr(&dsi_ctrl->hw,
+					BIT(DSI_FIFO_OVERFLOW), false);
+	}
+}
+
 /**
  * dsi_ctrl_cmd_tx_trigger() - Trigger a deferred command.
  * @dsi_ctrl: DSI controller handle.
@@ -3358,9 +3377,6 @@ int dsi_ctrl_cmd_tx_trigger(struct dsi_ctrl *dsi_ctrl, u32 flags)
 	if ((flags & DSI_CTRL_CMD_BROADCAST) &&
 			(flags & DSI_CTRL_CMD_BROADCAST_MASTER)) {
 		dsi_ctrl_wait_for_video_done(dsi_ctrl);
-		if (dsi_hw_ops.mask_error_intr)
-			dsi_hw_ops.mask_error_intr(&dsi_ctrl->hw,
-					BIT(DSI_FIFO_OVERFLOW), true);
 		atomic_set(&dsi_ctrl->dma_irq_trig, 0);
 		dsi_ctrl_enable_status_interrupt(dsi_ctrl,
 					DSI_SINT_CMD_MODE_DMA_DONE, NULL);
@@ -3388,11 +3404,6 @@ int dsi_ctrl_cmd_tx_trigger(struct dsi_ctrl *dsi_ctrl, u32 flags)
 		dsi_ctrl_dma_cmd_wait_for_done(&dsi_ctrl->dma_cmd_wait);
 	}
 
-	if (dsi_hw_ops.mask_error_intr &&
-			!dsi_ctrl->esd_check_underway)
-		dsi_hw_ops.mask_error_intr(&dsi_ctrl->hw,
-					BIT(DSI_FIFO_OVERFLOW), false);
-
 	if (flags & DSI_CTRL_CMD_NON_EMBEDDED_MODE) {
 		if (dsi_ctrl->version < DSI_CTRL_VERSION_2_4)
 			dsi_hw_ops.soft_reset(&dsi_ctrl->hw);