  1. // SPDX-License-Identifier: GPL-2.0
  2. /*
  3. * Copyright (C) 2020 Unisoc Inc.
  4. */
  5. #include <linux/component.h>
  6. #include <linux/module.h>
  7. #include <linux/of_address.h>
  8. #include <linux/of_device.h>
  9. #include <linux/of_irq.h>
  10. #include <linux/of_graph.h>
  11. #include <video/mipi_display.h>
  12. #include <drm/drm_atomic_helper.h>
  13. #include <drm/drm_bridge.h>
  14. #include <drm/drm_crtc_helper.h>
  15. #include <drm/drm_of.h>
  16. #include <drm/drm_probe_helper.h>
  17. #include "sprd_drm.h"
  18. #include "sprd_dpu.h"
  19. #include "sprd_dsi.h"
/* DSI host controller register offsets and bit-field definitions */
#define SOFT_RESET 0x04
#define MASK_PROTOCOL_INT 0x0C
#define MASK_INTERNAL_INT 0x14
#define DSI_MODE_CFG 0x18
#define VIRTUAL_CHANNEL_ID 0x1C
#define GEN_RX_VCID GENMASK(1, 0)
#define VIDEO_PKT_VCID GENMASK(3, 2)
#define DPI_VIDEO_FORMAT 0x20
#define DPI_VIDEO_MODE_FORMAT GENMASK(5, 0)
#define LOOSELY18_EN BIT(6)
/* video packet / horizontal-vertical timing registers */
#define VIDEO_PKT_CONFIG 0x24
#define VIDEO_PKT_SIZE GENMASK(15, 0)
#define VIDEO_LINE_CHUNK_NUM GENMASK(31, 16)
#define VIDEO_LINE_HBLK_TIME 0x28
#define VIDEO_LINE_HBP_TIME GENMASK(15, 0)
#define VIDEO_LINE_HSA_TIME GENMASK(31, 16)
#define VIDEO_LINE_TIME 0x2C
#define VIDEO_VBLK_LINES 0x30
#define VFP_LINES GENMASK(9, 0)
#define VBP_LINES GENMASK(19, 10)
#define VSA_LINES GENMASK(29, 20)
#define VIDEO_VACTIVE_LINES 0x34
#define VID_MODE_CFG 0x38
#define VID_MODE_TYPE GENMASK(1, 0)
#define LP_VSA_EN BIT(8)
#define LP_VBP_EN BIT(9)
#define LP_VFP_EN BIT(10)
#define LP_VACT_EN BIT(11)
#define LP_HBP_EN BIT(12)
#define LP_HFP_EN BIT(13)
#define FRAME_BTA_ACK_EN BIT(14)
/* timeout counters and escape clock */
#define TIMEOUT_CNT_CLK_CONFIG 0x40
#define HTX_TO_CONFIG 0x44
#define LRX_H_TO_CONFIG 0x48
#define TX_ESC_CLK_CONFIG 0x5C
#define CMD_MODE_CFG 0x68
#define TEAR_FX_EN BIT(0)
/* generic packet interface (header + payload FIFOs) */
#define GEN_HDR 0x6C
#define GEN_DT GENMASK(5, 0)
#define GEN_VC GENMASK(7, 6)
#define GEN_PLD_DATA 0x70
#define PHY_CLK_LANE_LP_CTRL 0x74
#define PHY_CLKLANE_TX_REQ_HS BIT(0)
#define AUTO_CLKLANE_CTRL_EN BIT(1)
#define PHY_INTERFACE_CTRL 0x78
#define RF_PHY_SHUTDOWN BIT(0)
#define RF_PHY_RESET_N BIT(1)
#define RF_PHY_CLK_EN BIT(2)
#define CMD_MODE_STATUS 0x98
#define GEN_CMD_RDATA_FIFO_EMPTY BIT(1)
#define GEN_CMD_WDATA_FIFO_EMPTY BIT(3)
#define GEN_CMD_CMD_FIFO_EMPTY BIT(5)
#define GEN_CMD_RDCMD_DONE BIT(7)
#define PHY_STATUS 0x9C
#define PHY_LOCK BIT(1)
/* D-PHY configuration registers */
#define PHY_MIN_STOP_TIME 0xA0
#define PHY_LANE_NUM_CONFIG 0xA4
#define PHY_CLKLANE_TIME_CONFIG 0xA8
#define PHY_CLKLANE_LP_TO_HS_TIME GENMASK(15, 0)
#define PHY_CLKLANE_HS_TO_LP_TIME GENMASK(31, 16)
#define PHY_DATALANE_TIME_CONFIG 0xAC
#define PHY_DATALANE_LP_TO_HS_TIME GENMASK(15, 0)
#define PHY_DATALANE_HS_TO_LP_TIME GENMASK(31, 16)
#define MAX_READ_TIME 0xB0
#define RX_PKT_CHECK_CONFIG 0xB4
#define RX_PKT_ECC_EN BIT(0)
#define RX_PKT_CRC_EN BIT(1)
#define TA_EN 0xB8
#define EOTP_EN 0xBC
#define TX_EOTP_EN BIT(0)
#define RX_EOTP_EN BIT(1)
#define VIDEO_NULLPKT_SIZE 0xC0
#define DCS_WM_PKT_SIZE 0xC4
#define VIDEO_SIG_DELAY_CONFIG 0xD0
#define VIDEO_SIG_DELAY GENMASK(23, 0)
/* D-PHY test interface (TESTEN/TESTDIN/TESTCLK protocol) */
#define PHY_TST_CTRL0 0xF0
#define PHY_TESTCLR BIT(0)
#define PHY_TESTCLK BIT(1)
#define PHY_TST_CTRL1 0xF4
#define PHY_TESTDIN GENMASK(7, 0)
#define PHY_TESTDOUT GENMASK(15, 8)
#define PHY_TESTEN BIT(16)

/* resolve the struct sprd_dsi that embeds a given mipi_dsi_host */
#define host_to_dsi(host) \
	container_of(host, struct sprd_dsi, host)
  104. static inline u32
  105. dsi_reg_rd(struct dsi_context *ctx, u32 offset, u32 mask,
  106. u32 shift)
  107. {
  108. return (readl(ctx->base + offset) & mask) >> shift;
  109. }
  110. static inline void
  111. dsi_reg_wr(struct dsi_context *ctx, u32 offset, u32 mask,
  112. u32 shift, u32 val)
  113. {
  114. u32 ret;
  115. ret = readl(ctx->base + offset);
  116. ret &= ~mask;
  117. ret |= (val << shift) & mask;
  118. writel(ret, ctx->base + offset);
  119. }
  120. static inline void
  121. dsi_reg_up(struct dsi_context *ctx, u32 offset, u32 mask,
  122. u32 val)
  123. {
  124. u32 ret = readl(ctx->base + offset);
  125. writel((ret & ~mask) | (val & mask), ctx->base + offset);
  126. }
/*
 * regmap write callback for the D-PHY test interface.
 *
 * Clocks an 8-bit register address into the PHY with TESTEN high,
 * then an 8-bit value with TESTEN low, each latched by a TESTCLK pulse.
 *
 * Returns 0 on success or -EINVAL if @reg or @val exceeds 8 bits.
 */
static int regmap_tst_io_write(void *context, u32 reg, u32 val)
{
	struct sprd_dsi *dsi = context;
	struct dsi_context *ctx = &dsi->ctx;

	if (val > 0xff || reg > 0xff)
		return -EINVAL;

	drm_dbg(dsi->drm, "reg = 0x%02x, val = 0x%02x\n", reg, val);

	/* address phase: TESTEN high, pulse TESTCLK */
	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, PHY_TESTEN);
	dsi_reg_wr(ctx, PHY_TST_CTRL1, PHY_TESTDIN, 0, reg);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, PHY_TESTCLK);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, 0);

	/* data phase: TESTEN low, pulse TESTCLK */
	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, 0);
	dsi_reg_wr(ctx, PHY_TST_CTRL1, PHY_TESTDIN, 0, val);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, PHY_TESTCLK);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, 0);

	return 0;
}
/*
 * regmap read callback for the D-PHY test interface.
 *
 * Clocks the 8-bit register address into the PHY (TESTEN high), then
 * samples the byte the PHY presents on TESTDOUT into *@val.
 *
 * Returns 0 on success or -EINVAL for an out-of-range address.
 */
static int regmap_tst_io_read(void *context, u32 reg, u32 *val)
{
	struct sprd_dsi *dsi = context;
	struct dsi_context *ctx = &dsi->ctx;
	int ret;

	if (reg > 0xff)
		return -EINVAL;

	/* address phase: TESTEN high, pulse TESTCLK */
	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, PHY_TESTEN);
	dsi_reg_wr(ctx, PHY_TST_CTRL1, PHY_TESTDIN, 0, reg);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, PHY_TESTCLK);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, 0);
	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, 0);

	/* give the PHY a moment to drive TESTDOUT before sampling */
	udelay(1);

	ret = dsi_reg_rd(ctx, PHY_TST_CTRL1, PHY_TESTDOUT, 8);
	if (ret < 0)
		return ret;

	*val = ret;

	drm_dbg(dsi->drm, "reg = 0x%02x, val = 0x%02x\n", reg, *val);
	return 0;
}
/* regmap bus that tunnels accesses through the D-PHY test interface */
static struct regmap_bus regmap_tst_io = {
	.reg_write = regmap_tst_io_write,
	.reg_read = regmap_tst_io_read,
};

/* 8-bit address / 8-bit value layout of the PHY test registers */
static const struct regmap_config byte_config = {
	.reg_bits = 8,
	.val_bits = 8,
};
  172. static int dphy_wait_pll_locked(struct dsi_context *ctx)
  173. {
  174. struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
  175. int i;
  176. for (i = 0; i < 50000; i++) {
  177. if (dsi_reg_rd(ctx, PHY_STATUS, PHY_LOCK, 1))
  178. return 0;
  179. udelay(3);
  180. }
  181. drm_err(dsi->drm, "dphy pll can not be locked\n");
  182. return -ETIMEDOUT;
  183. }
  184. static int dsi_wait_tx_payload_fifo_empty(struct dsi_context *ctx)
  185. {
  186. int i;
  187. for (i = 0; i < 5000; i++) {
  188. if (dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_WDATA_FIFO_EMPTY, 3))
  189. return 0;
  190. udelay(1);
  191. }
  192. return -ETIMEDOUT;
  193. }
  194. static int dsi_wait_tx_cmd_fifo_empty(struct dsi_context *ctx)
  195. {
  196. int i;
  197. for (i = 0; i < 5000; i++) {
  198. if (dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_CMD_FIFO_EMPTY, 5))
  199. return 0;
  200. udelay(1);
  201. }
  202. return -ETIMEDOUT;
  203. }
  204. static int dsi_wait_rd_resp_completed(struct dsi_context *ctx)
  205. {
  206. int i;
  207. for (i = 0; i < 10000; i++) {
  208. if (dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_RDCMD_DONE, 7))
  209. return 0;
  210. udelay(10);
  211. }
  212. return -ETIMEDOUT;
  213. }
  214. static u16 calc_bytes_per_pixel_x100(int coding)
  215. {
  216. u16 bpp_x100;
  217. switch (coding) {
  218. case COLOR_CODE_16BIT_CONFIG1:
  219. case COLOR_CODE_16BIT_CONFIG2:
  220. case COLOR_CODE_16BIT_CONFIG3:
  221. bpp_x100 = 200;
  222. break;
  223. case COLOR_CODE_18BIT_CONFIG1:
  224. case COLOR_CODE_18BIT_CONFIG2:
  225. bpp_x100 = 225;
  226. break;
  227. case COLOR_CODE_24BIT:
  228. bpp_x100 = 300;
  229. break;
  230. case COLOR_CODE_COMPRESSTION:
  231. bpp_x100 = 100;
  232. break;
  233. case COLOR_CODE_20BIT_YCC422_LOOSELY:
  234. bpp_x100 = 250;
  235. break;
  236. case COLOR_CODE_24BIT_YCC422:
  237. bpp_x100 = 300;
  238. break;
  239. case COLOR_CODE_16BIT_YCC422:
  240. bpp_x100 = 200;
  241. break;
  242. case COLOR_CODE_30BIT:
  243. bpp_x100 = 375;
  244. break;
  245. case COLOR_CODE_36BIT:
  246. bpp_x100 = 450;
  247. break;
  248. case COLOR_CODE_12BIT_YCC420:
  249. bpp_x100 = 150;
  250. break;
  251. default:
  252. DRM_ERROR("invalid color coding");
  253. bpp_x100 = 0;
  254. break;
  255. }
  256. return bpp_x100;
  257. }
  258. static u8 calc_video_size_step(int coding)
  259. {
  260. u8 video_size_step;
  261. switch (coding) {
  262. case COLOR_CODE_16BIT_CONFIG1:
  263. case COLOR_CODE_16BIT_CONFIG2:
  264. case COLOR_CODE_16BIT_CONFIG3:
  265. case COLOR_CODE_18BIT_CONFIG1:
  266. case COLOR_CODE_18BIT_CONFIG2:
  267. case COLOR_CODE_24BIT:
  268. case COLOR_CODE_COMPRESSTION:
  269. return video_size_step = 1;
  270. case COLOR_CODE_20BIT_YCC422_LOOSELY:
  271. case COLOR_CODE_24BIT_YCC422:
  272. case COLOR_CODE_16BIT_YCC422:
  273. case COLOR_CODE_30BIT:
  274. case COLOR_CODE_36BIT:
  275. case COLOR_CODE_12BIT_YCC420:
  276. return video_size_step = 2;
  277. default:
  278. DRM_ERROR("invalid color coding");
  279. return 0;
  280. }
  281. }
  282. static u16 round_video_size(int coding, u16 video_size)
  283. {
  284. switch (coding) {
  285. case COLOR_CODE_16BIT_YCC422:
  286. case COLOR_CODE_24BIT_YCC422:
  287. case COLOR_CODE_20BIT_YCC422_LOOSELY:
  288. case COLOR_CODE_12BIT_YCC420:
  289. /* round up active H pixels to a multiple of 2 */
  290. if ((video_size % 2) != 0)
  291. video_size += 1;
  292. break;
  293. default:
  294. break;
  295. }
  296. return video_size;
  297. }
  298. #define SPRD_MIPI_DSI_FMT_DSC 0xff
  299. static u32 fmt_to_coding(u32 fmt)
  300. {
  301. switch (fmt) {
  302. case MIPI_DSI_FMT_RGB565:
  303. return COLOR_CODE_16BIT_CONFIG1;
  304. case MIPI_DSI_FMT_RGB666:
  305. case MIPI_DSI_FMT_RGB666_PACKED:
  306. return COLOR_CODE_18BIT_CONFIG1;
  307. case MIPI_DSI_FMT_RGB888:
  308. return COLOR_CODE_24BIT;
  309. case SPRD_MIPI_DSI_FMT_DSC:
  310. return COLOR_CODE_COMPRESSTION;
  311. default:
  312. DRM_ERROR("Unsupported format (%d)\n", fmt);
  313. return COLOR_CODE_24BIT;
  314. }
  315. }
/*
 * Scale a time in ns by the byte clock rate, rounding up.
 * NOTE(review): the /1000000 divisor implies byte_clk is in kHz -
 * confirm against the unit of dsi->slave->hs_rate.
 */
#define ns_to_cycle(ns, byte_clk) \
	DIV_ROUND_UP((ns) * (byte_clk), 1000000)

/*
 * Bring the DSI host controller to a known, configured state: hold it
 * in reset, mask all interrupts, select command mode, configure EoTp,
 * RX ECC/CRC checking, escape clock divider and LP<->HS transition
 * timing, then release the controller from reset.
 */
static void sprd_dsi_init(struct dsi_context *ctx)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	u32 byte_clk = dsi->slave->hs_rate / 8;
	u16 data_hs2lp, data_lp2hs, clk_hs2lp, clk_lp2hs;
	u16 max_rd_time;
	int div;

	writel(0, ctx->base + SOFT_RESET);	/* hold controller in reset */
	writel(0xffffffff, ctx->base + MASK_PROTOCOL_INT);
	writel(0xffffffff, ctx->base + MASK_INTERNAL_INT);
	writel(1, ctx->base + DSI_MODE_CFG);	/* start in command mode */
	/* EoTp generation/reception off; ECC+CRC checking on RX enabled */
	dsi_reg_up(ctx, EOTP_EN, RX_EOTP_EN, 0);
	dsi_reg_up(ctx, EOTP_EN, TX_EOTP_EN, 0);
	dsi_reg_up(ctx, RX_PKT_CHECK_CONFIG, RX_PKT_ECC_EN, RX_PKT_ECC_EN);
	dsi_reg_up(ctx, RX_PKT_CHECK_CONFIG, RX_PKT_CRC_EN, RX_PKT_CRC_EN);
	writel(1, ctx->base + TA_EN);		/* enable bus turnaround */
	/* use virtual channel 0 for both video and generic RX */
	dsi_reg_up(ctx, VIRTUAL_CHANNEL_ID, VIDEO_PKT_VCID, 0);
	dsi_reg_up(ctx, VIRTUAL_CHANNEL_ID, GEN_RX_VCID, 0);

	/* escape clock divider, rounded up so lp_rate is not exceeded */
	div = DIV_ROUND_UP(byte_clk, dsi->slave->lp_rate);
	writel(div, ctx->base + TX_ESC_CLK_CONFIG);

	max_rd_time = ns_to_cycle(ctx->max_rd_time, byte_clk);
	writel(max_rd_time, ctx->base + MAX_READ_TIME);

	/* LP<->HS transition times, converted to byte-clock cycles */
	data_hs2lp = ns_to_cycle(ctx->data_hs2lp, byte_clk);
	data_lp2hs = ns_to_cycle(ctx->data_lp2hs, byte_clk);
	clk_hs2lp = ns_to_cycle(ctx->clk_hs2lp, byte_clk);
	clk_lp2hs = ns_to_cycle(ctx->clk_lp2hs, byte_clk);
	dsi_reg_wr(ctx, PHY_DATALANE_TIME_CONFIG,
		   PHY_DATALANE_HS_TO_LP_TIME, 16, data_hs2lp);
	dsi_reg_wr(ctx, PHY_DATALANE_TIME_CONFIG,
		   PHY_DATALANE_LP_TO_HS_TIME, 0, data_lp2hs);
	dsi_reg_wr(ctx, PHY_CLKLANE_TIME_CONFIG,
		   PHY_CLKLANE_HS_TO_LP_TIME, 16, clk_hs2lp);
	dsi_reg_wr(ctx, PHY_CLKLANE_TIME_CONFIG,
		   PHY_CLKLANE_LP_TO_HS_TIME, 0, clk_lp2hs);

	writel(1, ctx->base + SOFT_RESET);	/* release reset */
}
/*
 * Free up resources and shutdown host controller and PHY
 */
static void sprd_dsi_fini(struct dsi_context *ctx)
{
	/* mask every interrupt source and park the controller in reset */
	writel(0xffffffff, ctx->base + MASK_PROTOCOL_INT);
	writel(0xffffffff, ctx->base + MASK_INTERNAL_INT);
	writel(0, ctx->base + SOFT_RESET);
}
  363. /*
  364. * If not in burst mode, it will compute the video and null packet sizes
  365. * according to necessity.
  366. * Configure timers for data lanes and/or clock lane to return to LP when
  367. * bandwidth is not filled by data.
  368. */
  369. static int sprd_dsi_dpi_video(struct dsi_context *ctx)
  370. {
  371. struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
  372. struct videomode *vm = &ctx->vm;
  373. u32 byte_clk = dsi->slave->hs_rate / 8;
  374. u16 bpp_x100;
  375. u16 video_size;
  376. u32 ratio_x1000;
  377. u16 null_pkt_size = 0;
  378. u8 video_size_step;
  379. u32 hs_to;
  380. u32 total_bytes;
  381. u32 bytes_per_chunk;
  382. u32 chunks = 0;
  383. u32 bytes_left = 0;
  384. u32 chunk_overhead;
  385. const u8 pkt_header = 6;
  386. u8 coding;
  387. int div;
  388. u16 hline;
  389. u16 byte_cycle;
  390. coding = fmt_to_coding(dsi->slave->format);
  391. video_size = round_video_size(coding, vm->hactive);
  392. bpp_x100 = calc_bytes_per_pixel_x100(coding);
  393. video_size_step = calc_video_size_step(coding);
  394. ratio_x1000 = byte_clk * 1000 / (vm->pixelclock / 1000);
  395. hline = vm->hactive + vm->hsync_len + vm->hfront_porch +
  396. vm->hback_porch;
  397. writel(0, ctx->base + SOFT_RESET);
  398. dsi_reg_wr(ctx, VID_MODE_CFG, FRAME_BTA_ACK_EN, 15, ctx->frame_ack_en);
  399. dsi_reg_wr(ctx, DPI_VIDEO_FORMAT, DPI_VIDEO_MODE_FORMAT, 0, coding);
  400. dsi_reg_wr(ctx, VID_MODE_CFG, VID_MODE_TYPE, 0, ctx->burst_mode);
  401. byte_cycle = 95 * hline * ratio_x1000 / 100000;
  402. dsi_reg_wr(ctx, VIDEO_SIG_DELAY_CONFIG, VIDEO_SIG_DELAY, 0, byte_cycle);
  403. byte_cycle = hline * ratio_x1000 / 1000;
  404. writel(byte_cycle, ctx->base + VIDEO_LINE_TIME);
  405. byte_cycle = vm->hsync_len * ratio_x1000 / 1000;
  406. dsi_reg_wr(ctx, VIDEO_LINE_HBLK_TIME, VIDEO_LINE_HSA_TIME, 16, byte_cycle);
  407. byte_cycle = vm->hback_porch * ratio_x1000 / 1000;
  408. dsi_reg_wr(ctx, VIDEO_LINE_HBLK_TIME, VIDEO_LINE_HBP_TIME, 0, byte_cycle);
  409. writel(vm->vactive, ctx->base + VIDEO_VACTIVE_LINES);
  410. dsi_reg_wr(ctx, VIDEO_VBLK_LINES, VFP_LINES, 0, vm->vfront_porch);
  411. dsi_reg_wr(ctx, VIDEO_VBLK_LINES, VBP_LINES, 10, vm->vback_porch);
  412. dsi_reg_wr(ctx, VIDEO_VBLK_LINES, VSA_LINES, 20, vm->vsync_len);
  413. dsi_reg_up(ctx, VID_MODE_CFG, LP_HBP_EN | LP_HFP_EN | LP_VACT_EN |
  414. LP_VFP_EN | LP_VBP_EN | LP_VSA_EN, LP_HBP_EN | LP_HFP_EN |
  415. LP_VACT_EN | LP_VFP_EN | LP_VBP_EN | LP_VSA_EN);
  416. hs_to = (hline * vm->vactive) + (2 * bpp_x100) / 100;
  417. for (div = 0x80; (div < hs_to) && (div > 2); div--) {
  418. if ((hs_to % div) == 0) {
  419. writel(div, ctx->base + TIMEOUT_CNT_CLK_CONFIG);
  420. writel(hs_to / div, ctx->base + LRX_H_TO_CONFIG);
  421. writel(hs_to / div, ctx->base + HTX_TO_CONFIG);
  422. break;
  423. }
  424. }
  425. if (ctx->burst_mode == VIDEO_BURST_WITH_SYNC_PULSES) {
  426. dsi_reg_wr(ctx, VIDEO_PKT_CONFIG, VIDEO_PKT_SIZE, 0, video_size);
  427. writel(0, ctx->base + VIDEO_NULLPKT_SIZE);
  428. dsi_reg_up(ctx, VIDEO_PKT_CONFIG, VIDEO_LINE_CHUNK_NUM, 0);
  429. } else {
  430. /* non burst transmission */
  431. null_pkt_size = 0;
  432. /* bytes to be sent - first as one chunk */
  433. bytes_per_chunk = vm->hactive * bpp_x100 / 100 + pkt_header;
  434. /* hline total bytes from the DPI interface */
  435. total_bytes = (vm->hactive + vm->hfront_porch) *
  436. ratio_x1000 / dsi->slave->lanes / 1000;
  437. /* check if the pixels actually fit on the DSI link */
  438. if (total_bytes < bytes_per_chunk) {
  439. drm_err(dsi->drm, "current resolution can not be set\n");
  440. return -EINVAL;
  441. }
  442. chunk_overhead = total_bytes - bytes_per_chunk;
  443. /* overhead higher than 1 -> enable multi packets */
  444. if (chunk_overhead > 1) {
  445. /* multi packets */
  446. for (video_size = video_size_step;
  447. video_size < vm->hactive;
  448. video_size += video_size_step) {
  449. if (vm->hactive * 1000 / video_size % 1000)
  450. continue;
  451. chunks = vm->hactive / video_size;
  452. bytes_per_chunk = bpp_x100 * video_size / 100
  453. + pkt_header;
  454. if (total_bytes >= (bytes_per_chunk * chunks)) {
  455. bytes_left = total_bytes -
  456. bytes_per_chunk * chunks;
  457. break;
  458. }
  459. }
  460. /* prevent overflow (unsigned - unsigned) */
  461. if (bytes_left > (pkt_header * chunks)) {
  462. null_pkt_size = (bytes_left -
  463. pkt_header * chunks) / chunks;
  464. /* avoid register overflow */
  465. if (null_pkt_size > 1023)
  466. null_pkt_size = 1023;
  467. }
  468. } else {
  469. /* single packet */
  470. chunks = 1;
  471. /* must be a multiple of 4 except 18 loosely */
  472. for (video_size = vm->hactive;
  473. (video_size % video_size_step) != 0;
  474. video_size++)
  475. ;
  476. }
  477. dsi_reg_wr(ctx, VIDEO_PKT_CONFIG, VIDEO_PKT_SIZE, 0, video_size);
  478. writel(null_pkt_size, ctx->base + VIDEO_NULLPKT_SIZE);
  479. dsi_reg_wr(ctx, VIDEO_PKT_CONFIG, VIDEO_LINE_CHUNK_NUM, 16, chunks);
  480. }
  481. writel(ctx->int0_mask, ctx->base + MASK_PROTOCOL_INT);
  482. writel(ctx->int1_mask, ctx->base + MASK_INTERNAL_INT);
  483. writel(1, ctx->base + SOFT_RESET);
  484. return 0;
  485. }
  486. static void sprd_dsi_edpi_video(struct dsi_context *ctx)
  487. {
  488. struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
  489. const u32 fifo_depth = 1096;
  490. const u32 word_length = 4;
  491. u32 hactive = ctx->vm.hactive;
  492. u32 bpp_x100;
  493. u32 max_fifo_len;
  494. u8 coding;
  495. coding = fmt_to_coding(dsi->slave->format);
  496. bpp_x100 = calc_bytes_per_pixel_x100(coding);
  497. max_fifo_len = word_length * fifo_depth * 100 / bpp_x100;
  498. writel(0, ctx->base + SOFT_RESET);
  499. dsi_reg_wr(ctx, DPI_VIDEO_FORMAT, DPI_VIDEO_MODE_FORMAT, 0, coding);
  500. dsi_reg_wr(ctx, CMD_MODE_CFG, TEAR_FX_EN, 0, ctx->te_ack_en);
  501. if (max_fifo_len > hactive)
  502. writel(hactive, ctx->base + DCS_WM_PKT_SIZE);
  503. else
  504. writel(max_fifo_len, ctx->base + DCS_WM_PKT_SIZE);
  505. writel(ctx->int0_mask, ctx->base + MASK_PROTOCOL_INT);
  506. writel(ctx->int1_mask, ctx->base + MASK_INTERNAL_INT);
  507. writel(1, ctx->base + SOFT_RESET);
  508. }
/*
 * Send a packet on the generic interface,
 * this function has an active delay to wait for the buffer to clear.
 * The delay is limited to:
 * (param_length / 4) x DSIH_FIFO_ACTIVE_WAIT x register access time
 * the controller restricts the sending of.
 *
 * This function will not be able to send Null and Blanking packets due to
 * controller restriction
 */
static int sprd_dsi_wr_pkt(struct dsi_context *ctx, u8 vc, u8 type,
			   const u8 *param, u16 len)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	u8 wc_lsbyte, wc_msbyte;
	u32 payload;
	int i, j, ret;

	/* virtual channel id is a 2-bit field */
	if (vc > 3)
		return -EINVAL;

	/* 1st: for long packet, must config payload first */
	ret = dsi_wait_tx_payload_fifo_empty(ctx);
	if (ret) {
		drm_err(dsi->drm, "tx payload fifo is not empty\n");
		return ret;
	}

	if (len > 2) {
		/* long packet: pack the payload 4 bytes per FIFO word,
		 * little-endian within each word
		 */
		for (i = 0, j = 0; i < len; i += j) {
			payload = 0;
			for (j = 0; (j < 4) && ((j + i) < (len)); j++)
				payload |= param[i + j] << (j * 8);

			writel(payload, ctx->base + GEN_PLD_DATA);
		}
		/* the 16-bit word count goes into the packet header */
		wc_lsbyte = len & 0xff;
		wc_msbyte = len >> 8;
	} else {
		/* short packet: up to two parameter bytes ride in the header */
		wc_lsbyte = (len > 0) ? param[0] : 0;
		wc_msbyte = (len > 1) ? param[1] : 0;
	}

	/* 2nd: then set packet header */
	ret = dsi_wait_tx_cmd_fifo_empty(ctx);
	if (ret) {
		drm_err(dsi->drm, "tx cmd fifo is not empty\n");
		return ret;
	}

	/* header layout: data type | virtual channel | word count */
	writel(type | (vc << 6) | (wc_lsbyte << 8) | (wc_msbyte << 16),
	       ctx->base + GEN_HDR);

	return 0;
}
/*
 * Send READ packet to peripheral using the generic interface,
 * this will force command mode and stop video mode (because of BTA).
 *
 * This function has an active delay to wait for the buffer to clear,
 * the delay is limited to 2 x DSIH_FIFO_ACTIVE_WAIT
 * (waiting for command buffer, and waiting for receiving)
 * @note this function will enable BTA
 */
static int sprd_dsi_rd_pkt(struct dsi_context *ctx, u8 vc, u8 type,
			   u8 msb_byte, u8 lsb_byte,
			   u8 *buffer, u8 bytes_to_read)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	int i, ret;
	int count = 0;
	u32 temp;

	/* virtual channel id is a 2-bit field */
	if (vc > 3)
		return -EINVAL;

	/* 1st: send read command to peripheral */
	ret = dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_CMD_FIFO_EMPTY, 5);
	if (!ret)
		return -EIO;

	writel(type | (vc << 6) | (lsb_byte << 8) | (msb_byte << 16),
	       ctx->base + GEN_HDR);

	/* 2nd: wait peripheral response completed */
	ret = dsi_wait_rd_resp_completed(ctx);
	if (ret) {
		drm_err(dsi->drm, "wait read response time out\n");
		return ret;
	}

	/* 3rd: get data from rx payload fifo */
	ret = dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_RDATA_FIFO_EMPTY, 1);
	if (ret) {
		drm_err(dsi->drm, "rx payload fifo empty\n");
		return -EIO;
	}

	/* drain up to 100 FIFO words, unpacking 4 bytes per word;
	 * returns the byte count once the FIFO reports empty
	 */
	for (i = 0; i < 100; i++) {
		temp = readl(ctx->base + GEN_PLD_DATA);

		if (count < bytes_to_read)
			buffer[count++] = temp & 0xff;
		if (count < bytes_to_read)
			buffer[count++] = (temp >> 8) & 0xff;
		if (count < bytes_to_read)
			buffer[count++] = (temp >> 16) & 0xff;
		if (count < bytes_to_read)
			buffer[count++] = (temp >> 24) & 0xff;

		ret = dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_RDATA_FIFO_EMPTY, 1);
		if (ret)
			return count;
	}

	/* NOTE(review): if the FIFO never empties within 100 words this
	 * returns 0 (no bytes) rather than an error - confirm intended.
	 */
	return 0;
}
  610. static void sprd_dsi_set_work_mode(struct dsi_context *ctx, u8 mode)
  611. {
  612. if (mode == DSI_MODE_CMD)
  613. writel(1, ctx->base + DSI_MODE_CFG);
  614. else
  615. writel(0, ctx->base + DSI_MODE_CFG);
  616. }
/* Pulse SOFT_RESET (low for 100 us) to return the controller to idle. */
static void sprd_dsi_state_reset(struct dsi_context *ctx)
{
	writel(0, ctx->base + SOFT_RESET);
	udelay(100);
	writel(1, ctx->base + SOFT_RESET);
}
/*
 * Power up and configure the MIPI D-PHY: hold it in shutdown/reset,
 * clear the test interface, program PLL and lane timing, then release
 * shutdown/reset and wait for the PLL to lock.
 *
 * Returns 0 on success or -ETIMEDOUT if the PLL never locks.
 */
static int sprd_dphy_init(struct dsi_context *ctx)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	int ret;

	/* hold the PHY in shutdown/reset with its clock gated */
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, 0);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_SHUTDOWN, 0);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_CLK_EN, 0);

	/* pulse TESTCLR to reset the PHY test interface */
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLR, 0);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLR, PHY_TESTCLR);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLR, 0);

	dphy_pll_config(ctx);
	dphy_timing_config(ctx);

	/* release shutdown, then reset; finally ungate the clock */
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_SHUTDOWN, RF_PHY_SHUTDOWN);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, RF_PHY_RESET_N);
	writel(0x1C, ctx->base + PHY_MIN_STOP_TIME);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_CLK_EN, RF_PHY_CLK_EN);
	writel(dsi->slave->lanes - 1, ctx->base + PHY_LANE_NUM_CONFIG);

	ret = dphy_wait_pll_locked(ctx);
	if (ret) {
		drm_err(dsi->drm, "dphy initial failed\n");
		return ret;
	}

	return 0;
}
/*
 * Shut the D-PHY down: assert reset and shutdown, then release reset
 * while leaving shutdown asserted.
 */
static void sprd_dphy_fini(struct dsi_context *ctx)
{
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, 0);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_SHUTDOWN, 0);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, RF_PHY_RESET_N);
}
/* Cache the adjusted mode as a videomode for later register programming. */
static void sprd_dsi_encoder_mode_set(struct drm_encoder *encoder,
				      struct drm_display_mode *mode,
				      struct drm_display_mode *adj_mode)
{
	struct sprd_dsi *dsi = encoder_to_dsi(encoder);

	drm_display_mode_to_videomode(adj_mode, &dsi->ctx.vm);
}
/*
 * Power-up sequence for the pipe: configure the DSI host for the
 * negotiated work mode, bring up the D-PHY, set up clock-lane
 * behavior, and finally start the DPU that feeds the host.
 */
static void sprd_dsi_encoder_enable(struct drm_encoder *encoder)
{
	struct sprd_dsi *dsi = encoder_to_dsi(encoder);
	struct sprd_dpu *dpu = to_sprd_crtc(encoder->crtc);
	struct dsi_context *ctx = &dsi->ctx;

	/* skip re-initialization if the link is already up */
	if (ctx->enabled) {
		drm_warn(dsi->drm, "dsi is initialized\n");
		return;
	}

	sprd_dsi_init(ctx);

	if (ctx->work_mode == DSI_MODE_VIDEO)
		sprd_dsi_dpi_video(ctx);
	else
		sprd_dsi_edpi_video(ctx);

	sprd_dphy_init(ctx);

	sprd_dsi_set_work_mode(ctx, ctx->work_mode);
	sprd_dsi_state_reset(ctx);

	if (dsi->slave->mode_flags & MIPI_DSI_CLOCK_NON_CONTINUOUS) {
		/* let the PHY manage the clock lane automatically */
		dsi_reg_up(ctx, PHY_CLK_LANE_LP_CTRL, AUTO_CLKLANE_CTRL_EN,
			   AUTO_CLKLANE_CTRL_EN);
	} else {
		/* continuous clock: request HS on the clock lane */
		dsi_reg_up(ctx, PHY_CLK_LANE_LP_CTRL, RF_PHY_CLK_EN, RF_PHY_CLK_EN);
		dsi_reg_up(ctx, PHY_CLK_LANE_LP_CTRL, PHY_CLKLANE_TX_REQ_HS,
			   PHY_CLKLANE_TX_REQ_HS);
		dphy_wait_pll_locked(ctx);
	}

	sprd_dpu_run(dpu);

	ctx->enabled = true;
}
/*
 * Tear the pipe down in reverse order of enable: stop the DPU first,
 * then shut down the PHY and finally the DSI host.
 */
static void sprd_dsi_encoder_disable(struct drm_encoder *encoder)
{
	struct sprd_dsi *dsi = encoder_to_dsi(encoder);
	struct sprd_dpu *dpu = to_sprd_crtc(encoder->crtc);
	struct dsi_context *ctx = &dsi->ctx;

	/* nothing to do if the link is already down */
	if (!ctx->enabled) {
		drm_warn(dsi->drm, "dsi isn't initialized\n");
		return;
	}

	sprd_dpu_stop(dpu);
	sprd_dphy_fini(ctx);
	sprd_dsi_fini(ctx);

	ctx->enabled = false;
}
/* mode_set caches timings; enable/disable sequence host, PHY and DPU */
static const struct drm_encoder_helper_funcs sprd_encoder_helper_funcs = {
	.mode_set = sprd_dsi_encoder_mode_set,
	.enable = sprd_dsi_encoder_enable,
	.disable = sprd_dsi_encoder_disable
};

static const struct drm_encoder_funcs sprd_encoder_funcs = {
	.destroy = drm_encoder_cleanup,
};
/*
 * Create and register the DRM encoder for this DSI host.
 *
 * Returns 0 on success, -EINVAL when no possible CRTC can be derived
 * from the device tree, or the error from drm_encoder_init().
 */
static int sprd_dsi_encoder_init(struct sprd_dsi *dsi,
				 struct device *dev)
{
	struct drm_encoder *encoder = &dsi->encoder;
	u32 crtc_mask;
	int ret;

	crtc_mask = drm_of_find_possible_crtcs(dsi->drm, dev->of_node);
	if (!crtc_mask) {
		drm_err(dsi->drm, "failed to find crtc mask\n");
		return -EINVAL;
	}

	drm_dbg(dsi->drm, "find possible crtcs: 0x%08x\n", crtc_mask);

	encoder->possible_crtcs = crtc_mask;
	ret = drm_encoder_init(dsi->drm, encoder, &sprd_encoder_funcs,
			       DRM_MODE_ENCODER_DSI, NULL);
	if (ret) {
		drm_err(dsi->drm, "failed to init dsi encoder\n");
		return ret;
	}

	drm_encoder_helper_add(encoder, &sprd_encoder_helper_funcs);

	return 0;
}
  733. static int sprd_dsi_bridge_init(struct sprd_dsi *dsi,
  734. struct device *dev)
  735. {
  736. int ret;
  737. dsi->panel_bridge = devm_drm_of_get_bridge(dev, dev->of_node, 1, 0);
  738. if (IS_ERR(dsi->panel_bridge))
  739. return PTR_ERR(dsi->panel_bridge);
  740. ret = drm_bridge_attach(&dsi->encoder, dsi->panel_bridge, NULL, 0);
  741. if (ret)
  742. return ret;
  743. return 0;
  744. }
/*
 * Map the controller registers, create the PHY-test-interface regmap
 * and seed the context with default timing parameters.
 */
static int sprd_dsi_context_init(struct sprd_dsi *dsi,
				 struct device *dev)
{
	struct platform_device *pdev = to_platform_device(dev);
	struct dsi_context *ctx = &dsi->ctx;
	struct resource *res;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res) {
		dev_err(dev, "failed to get I/O resource\n");
		return -EINVAL;
	}

	/*
	 * NOTE(review): plain devm_ioremap() without request_mem_region -
	 * presumably the range is shared with another block; confirm.
	 */
	ctx->base = devm_ioremap(dev, res->start, resource_size(res));
	if (!ctx->base) {
		drm_err(dsi->drm, "failed to map dsi host registers\n");
		return -ENXIO;
	}

	ctx->regmap = devm_regmap_init(dev, &regmap_tst_io, dsi, &byte_config);
	if (IS_ERR(ctx->regmap)) {
		drm_err(dsi->drm, "dphy regmap init failed\n");
		return PTR_ERR(ctx->regmap);
	}

	/* default LP<->HS transition times and max read time (consumed
	 * via ns_to_cycle in sprd_dsi_init)
	 */
	ctx->data_hs2lp = 120;
	ctx->data_lp2hs = 500;
	ctx->clk_hs2lp = 4;
	ctx->clk_lp2hs = 15;
	ctx->max_rd_time = 6000;
	/* mask all interrupt sources by default */
	ctx->int0_mask = 0xffffffff;
	ctx->int1_mask = 0xffffffff;
	/*
	 * NOTE(review): the context starts flagged as enabled, so the
	 * first encoder enable is a no-op until a disable clears the
	 * flag - presumably relies on the bootloader having lit the
	 * display; confirm against the enable path.
	 */
	ctx->enabled = true;

	return 0;
}
  776. static int sprd_dsi_bind(struct device *dev, struct device *master, void *data)
  777. {
  778. struct drm_device *drm = data;
  779. struct sprd_dsi *dsi = dev_get_drvdata(dev);
  780. int ret;
  781. dsi->drm = drm;
  782. ret = sprd_dsi_encoder_init(dsi, dev);
  783. if (ret)
  784. return ret;
  785. ret = sprd_dsi_bridge_init(dsi, dev);
  786. if (ret)
  787. return ret;
  788. ret = sprd_dsi_context_init(dsi, dev);
  789. if (ret)
  790. return ret;
  791. return 0;
  792. }
/* Component unbind: drop the panel bridge and destroy the encoder. */
static void sprd_dsi_unbind(struct device *dev,
			    struct device *master, void *data)
{
	struct sprd_dsi *dsi = dev_get_drvdata(dev);

	drm_of_panel_bridge_remove(dev->of_node, 1, 0);

	drm_encoder_cleanup(&dsi->encoder);
}
/* bound/unbound by the DRM master device via the component framework */
static const struct component_ops dsi_component_ops = {
	.bind = sprd_dsi_bind,
	.unbind = sprd_dsi_unbind,
};
  804. static int sprd_dsi_host_attach(struct mipi_dsi_host *host,
  805. struct mipi_dsi_device *slave)
  806. {
  807. struct sprd_dsi *dsi = host_to_dsi(host);
  808. struct dsi_context *ctx = &dsi->ctx;
  809. dsi->slave = slave;
  810. if (slave->mode_flags & MIPI_DSI_MODE_VIDEO)
  811. ctx->work_mode = DSI_MODE_VIDEO;
  812. else
  813. ctx->work_mode = DSI_MODE_CMD;
  814. if (slave->mode_flags & MIPI_DSI_MODE_VIDEO_BURST)
  815. ctx->burst_mode = VIDEO_BURST_WITH_SYNC_PULSES;
  816. else if (slave->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE)
  817. ctx->burst_mode = VIDEO_NON_BURST_WITH_SYNC_PULSES;
  818. else
  819. ctx->burst_mode = VIDEO_NON_BURST_WITH_SYNC_EVENTS;
  820. return component_add(host->dev, &dsi_component_ops);
  821. }
/* mipi_dsi_host detach hook: remove the component added at attach. */
static int sprd_dsi_host_detach(struct mipi_dsi_host *host,
				struct mipi_dsi_device *slave)
{
	component_del(host->dev, &dsi_component_ops);

	return 0;
}
/*
 * mipi_dsi_host transfer hook: route a message to the generic read
 * path when a read buffer is supplied, otherwise to the write path.
 * Returns the number of bytes read, 0, or a negative error.
 */
static ssize_t sprd_dsi_host_transfer(struct mipi_dsi_host *host,
				      const struct mipi_dsi_msg *msg)
{
	struct sprd_dsi *dsi = host_to_dsi(host);
	const u8 *tx_buf = msg->tx_buf;

	if (msg->rx_buf && msg->rx_len) {
		/* the first two tx bytes parametrize the read request */
		u8 lsb = (msg->tx_len > 0) ? tx_buf[0] : 0;
		u8 msb = (msg->tx_len > 1) ? tx_buf[1] : 0;

		/* NOTE(review): rx_len is narrowed to u8 here, so reads
		 * longer than 255 bytes would be silently clipped.
		 */
		return sprd_dsi_rd_pkt(&dsi->ctx, msg->channel, msg->type,
				       msb, lsb, msg->rx_buf, msg->rx_len);
	}

	if (msg->tx_buf && msg->tx_len)
		return sprd_dsi_wr_pkt(&dsi->ctx, msg->channel, msg->type,
				       tx_buf, msg->tx_len);

	return 0;
}
static const struct mipi_dsi_host_ops sprd_dsi_host_ops = {
	.attach = sprd_dsi_host_attach,
	.detach = sprd_dsi_host_detach,
	.transfer = sprd_dsi_host_transfer,
};

/* device-tree compatibles served by this driver */
static const struct of_device_id dsi_match_table[] = {
	{ .compatible = "sprd,sharkl3-dsi-host" },
	{ /* sentinel */ },
};
  853. static int sprd_dsi_probe(struct platform_device *pdev)
  854. {
  855. struct device *dev = &pdev->dev;
  856. struct sprd_dsi *dsi;
  857. dsi = devm_kzalloc(dev, sizeof(*dsi), GFP_KERNEL);
  858. if (!dsi)
  859. return -ENOMEM;
  860. dev_set_drvdata(dev, dsi);
  861. dsi->host.ops = &sprd_dsi_host_ops;
  862. dsi->host.dev = dev;
  863. return mipi_dsi_host_register(&dsi->host);
  864. }
/* Platform remove: unregister the DSI host created at probe. */
static int sprd_dsi_remove(struct platform_device *pdev)
{
	struct sprd_dsi *dsi = dev_get_drvdata(&pdev->dev);

	mipi_dsi_host_unregister(&dsi->host);

	return 0;
}
/* platform driver glue; full bring-up runs via the component framework */
struct platform_driver sprd_dsi_driver = {
	.probe = sprd_dsi_probe,
	.remove = sprd_dsi_remove,
	.driver = {
		.name = "sprd-dsi-drv",
		.of_match_table = dsi_match_table,
	},
};

MODULE_AUTHOR("Leon He <[email protected]>");
MODULE_AUTHOR("Kevin Tang <[email protected]>");
MODULE_DESCRIPTION("Unisoc MIPI DSI HOST Controller Driver");
MODULE_LICENSE("GPL v2");