vc4_crtc.c

  1. // SPDX-License-Identifier: GPL-2.0-only
  2. /*
  3. * Copyright (C) 2015 Broadcom
  4. */
  5. /**
  6. * DOC: VC4 CRTC module
  7. *
  8. * In VC4, the Pixel Valve is what most closely corresponds to the
  9. * DRM's concept of a CRTC. The PV generates video timings from the
  10. * encoder's clock plus its configuration. It pulls scaled pixels from
  11. * the HVS at that timing, and feeds them to the encoder.
  12. *
  13. * However, the DRM CRTC also collects the configuration of all the
  14. * DRM planes attached to it. As a result, the CRTC is also
  15. * responsible for writing the display list for the HVS channel that
  16. * the CRTC will use.
  17. *
  18. * The 2835 has 3 different pixel valves. pv0 in the audio power
  19. * domain feeds DSI0 or DPI, while pv1 feeds DSI1 or SMI. pv2 in the
  20. * image domain can feed either HDMI or the SDTV controller. The
  21. * pixel valve chooses from the CPRMAN clocks (HSM for HDMI, VEC for
  22. * SDTV, etc.) according to which output type is chosen in the mux.
  23. *
  24. * For power management, the pixel valve's registers are all clocked
  25. * by the AXI clock, while the timings and FIFOs make use of the
  26. * output-specific clock. Since the encoders also directly consume
  27. * the CPRMAN clocks, and know what timings they need, they are the
  28. * ones that set the clock.
  29. */
  30. #include <linux/clk.h>
  31. #include <linux/component.h>
  32. #include <linux/of_device.h>
  33. #include <linux/pm_runtime.h>
  34. #include <drm/drm_atomic.h>
  35. #include <drm/drm_atomic_helper.h>
  36. #include <drm/drm_atomic_uapi.h>
  37. #include <drm/drm_fb_dma_helper.h>
  38. #include <drm/drm_framebuffer.h>
  39. #include <drm/drm_drv.h>
  40. #include <drm/drm_print.h>
  41. #include <drm/drm_probe_helper.h>
  42. #include <drm/drm_vblank.h>
  43. #include "vc4_drv.h"
  44. #include "vc4_hdmi.h"
  45. #include "vc4_regs.h"
  46. #define HVS_FIFO_LATENCY_PIX 6
  47. #define CRTC_WRITE(offset, val) writel(val, vc4_crtc->regs + (offset))
  48. #define CRTC_READ(offset) readl(vc4_crtc->regs + (offset))
  49. static const struct debugfs_reg32 crtc_regs[] = {
  50. VC4_REG32(PV_CONTROL),
  51. VC4_REG32(PV_V_CONTROL),
  52. VC4_REG32(PV_VSYNCD_EVEN),
  53. VC4_REG32(PV_HORZA),
  54. VC4_REG32(PV_HORZB),
  55. VC4_REG32(PV_VERTA),
  56. VC4_REG32(PV_VERTB),
  57. VC4_REG32(PV_VERTA_EVEN),
  58. VC4_REG32(PV_VERTB_EVEN),
  59. VC4_REG32(PV_INTEN),
  60. VC4_REG32(PV_INTSTAT),
  61. VC4_REG32(PV_STAT),
  62. VC4_REG32(PV_HACT_ACT),
  63. };
  64. static unsigned int
  65. vc4_crtc_get_cob_allocation(struct vc4_dev *vc4, unsigned int channel)
  66. {
  67. struct vc4_hvs *hvs = vc4->hvs;
  68. u32 dispbase = HVS_READ(SCALER_DISPBASEX(channel));
  69. /* Top/base are supposed to be 4-pixel aligned, but the
  70. * Raspberry Pi firmware fills the low bits (which are
  71. * presumably ignored).
  72. */
  73. u32 top = VC4_GET_FIELD(dispbase, SCALER_DISPBASEX_TOP) & ~3;
  74. u32 base = VC4_GET_FIELD(dispbase, SCALER_DISPBASEX_BASE) & ~3;
  75. return top - base + 4;
  76. }
  77. static bool vc4_crtc_get_scanout_position(struct drm_crtc *crtc,
  78. bool in_vblank_irq,
  79. int *vpos, int *hpos,
  80. ktime_t *stime, ktime_t *etime,
  81. const struct drm_display_mode *mode)
  82. {
  83. struct drm_device *dev = crtc->dev;
  84. struct vc4_dev *vc4 = to_vc4_dev(dev);
  85. struct vc4_hvs *hvs = vc4->hvs;
  86. struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
  87. struct vc4_crtc_state *vc4_crtc_state = to_vc4_crtc_state(crtc->state);
  88. unsigned int cob_size;
  89. u32 val;
  90. int fifo_lines;
  91. int vblank_lines;
  92. bool ret = false;
  93. /* preempt_disable_rt() should go right here in PREEMPT_RT patchset. */
  94. /* Get optional system timestamp before query. */
  95. if (stime)
  96. *stime = ktime_get();
  97. /*
  98. * Read vertical scanline which is currently composed for our
  99. * pixelvalve by the HVS, and also the scaler status.
  100. */
  101. val = HVS_READ(SCALER_DISPSTATX(vc4_crtc_state->assigned_channel));
  102. /* Get optional system timestamp after query. */
  103. if (etime)
  104. *etime = ktime_get();
  105. /* preempt_enable_rt() should go right here in PREEMPT_RT patchset. */
  106. /* Vertical position of hvs composed scanline. */
  107. *vpos = VC4_GET_FIELD(val, SCALER_DISPSTATX_LINE);
  108. *hpos = 0;
  109. if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
  110. *vpos /= 2;
  111. /* Use hpos to correct for field offset in interlaced mode. */
  112. if (vc4_hvs_get_fifo_frame_count(hvs, vc4_crtc_state->assigned_channel) % 2)
  113. *hpos += mode->crtc_htotal / 2;
  114. }
  115. cob_size = vc4_crtc_get_cob_allocation(vc4, vc4_crtc_state->assigned_channel);
  116. /* This is the offset we need for translating hvs -> pv scanout pos. */
  117. fifo_lines = cob_size / mode->crtc_hdisplay;
  118. if (fifo_lines > 0)
  119. ret = true;
  120. /* HVS more than fifo_lines into frame for compositing? */
  121. if (*vpos > fifo_lines) {
  122. /*
  123. * We are in active scanout and can get some meaningful results
  124. * from the HVS. The actual PV scanout cannot trail behind by more
  125. * than fifo_lines, as that is the fifo's capacity. Assume that
  126. * in active scanout the HVS and PV work in lockstep wrt. the HVS
  127. * refilling the fifo and the PV consuming from it, i.e.
  128. * whenever the PV consumes and frees up a scanline in the
  129. * fifo, the HVS will immediately refill it, thereby
  130. * incrementing vpos. We therefore choose HVS read position -
  131. * fifo size in scanlines as an estimate of the real scanout
  132. * position of the PV.
  133. */
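/* Illustration (assumed numbers, not read from hardware): with a
 * 1920-pixel-wide mode and a 7680-pixel COB allocation, fifo_lines is
 * 4, so an HVS line counter of 100 maps to an estimated PV scanout
 * line of 100 - (4 + 1) = 95.
 */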
  134. *vpos -= fifo_lines + 1;
  135. return ret;
  136. }
  137. /*
  138. * Otherwise (vpos <= fifo_lines): we are in vblank and the HVS, after
  139. * getting the VSTART restart signal from the PV, has just started
  140. * refilling its fifo with the top-most lines of the new framebuffers.
  141. * The PV does not scan out in vblank, so it does not remove lines
  142. * from the fifo; the fifo fills up quickly and the HVS has to pause.
  143. * We can't get meaningful readings wrt. the scanline position of the
  144. * PV and need to make things up in an approximate but consistent way.
  145. */
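/* Illustrative only: a standard 1080p mode has vtotal 1125 and
 * vdisplay 1080, i.e. 45 lines of vblank.
 */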
  146. vblank_lines = mode->vtotal - mode->vdisplay;
  147. if (in_vblank_irq) {
  148. /*
  149. * Assume the irq handler got called close to the first
  150. * line of vblank, so the PV has about a full vblank's
  151. * worth of scanlines to go. As a base timestamp, use the
  152. * one taken at entry into the vblank irq handler, so it
  153. * is not affected by random delays due to lock
  154. * contention on event_lock or the vblank_time lock in
  155. * the core.
  156. */
  157. *vpos = -vblank_lines;
  158. if (stime)
  159. *stime = vc4_crtc->t_vblank;
  160. if (etime)
  161. *etime = vc4_crtc->t_vblank;
  162. /*
  163. * If the HVS fifo is not yet full then we know for certain
  164. * we are at the very beginning of vblank, as the hvs just
  165. * started refilling, and the stime and etime timestamps
  166. * truly correspond to start of vblank.
  167. *
  168. * Unfortunately there's no way to report this to upper levels
  169. * and make it more useful.
  170. */
  171. } else {
  172. /*
  173. * No clue where we are inside vblank. Return a vpos of zero,
  174. * which will cause calling code to just return the etime
  175. * timestamp uncorrected. At least this is no worse than the
  176. * standard fallback.
  177. */
  178. *vpos = 0;
  179. }
  180. return ret;
  181. }
  182. static u32 vc4_get_fifo_full_level(struct vc4_crtc *vc4_crtc, u32 format)
  183. {
  184. const struct vc4_crtc_data *crtc_data = vc4_crtc_to_vc4_crtc_data(vc4_crtc);
  185. const struct vc4_pv_data *pv_data = vc4_crtc_to_vc4_pv_data(vc4_crtc);
  186. struct vc4_dev *vc4 = to_vc4_dev(vc4_crtc->base.dev);
  187. u32 fifo_len_bytes = pv_data->fifo_depth;
  188. /*
  189. * Pixels are pulled from the HVS if the number of bytes is
  190. * lower than the FIFO full level.
  191. *
  192. * The latency of the pixel fetch mechanism is 6 pixels, so we
  193. * need to convert those 6 pixels to bytes, depending on the
  194. * format, and then subtract that from the length of the FIFO
  195. * to make sure we never end up in a situation where the FIFO
  196. * is full.
  197. */
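/* For example, the 16-bpp DSI formats below use 2 bytes per pixel, so
 * the 6-pixel latency corresponds to 12 bytes and the full level is
 * fifo_depth - 12; the 24-bpp cases use 3 bytes per pixel (18 bytes).
 */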
  198. switch (format) {
  199. case PV_CONTROL_FORMAT_DSIV_16:
  200. case PV_CONTROL_FORMAT_DSIC_16:
  201. return fifo_len_bytes - 2 * HVS_FIFO_LATENCY_PIX;
  202. case PV_CONTROL_FORMAT_DSIV_18:
  203. return fifo_len_bytes - 14;
  204. case PV_CONTROL_FORMAT_24:
  205. case PV_CONTROL_FORMAT_DSIV_24:
  206. default:
  207. /*
  208. * For some reason, the pixelvalve4 doesn't work with
  209. * the usual formula and will only work with 32.
  210. */
  211. if (crtc_data->hvs_output == 5)
  212. return 32;
  213. /*
  214. * It looks like in some situations, we will overflow
  215. * the PixelValve FIFO (with the bit 10 of PV stat being
  216. * set) and stall the HVS / PV, eventually resulting in
  217. * a page flip timeout.
  218. *
  219. * Displaying the video overlay during playback with
  220. * Kodi on an RPi3 is a reliable way to reproduce it,
  221. * with a failure rate of around 50%.
  222. *
  223. * Removing 1 from the FIFO full level, however,
  224. * seems to completely remove that issue.
  225. */
  226. if (!vc4->is_vc5)
  227. return fifo_len_bytes - 3 * HVS_FIFO_LATENCY_PIX - 1;
  228. return fifo_len_bytes - 3 * HVS_FIFO_LATENCY_PIX;
  229. }
  230. }
  231. static u32 vc4_crtc_get_fifo_full_level_bits(struct vc4_crtc *vc4_crtc,
  232. u32 format)
  233. {
  234. u32 level = vc4_get_fifo_full_level(vc4_crtc, format);
  235. u32 ret = 0;
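/* Purely illustrative: a level of 124 (0x7c) would be programmed as
 * FIFO_LEVEL_HIGH = 124 >> 6 = 1 and FIFO_LEVEL = 124 & 0x3f = 0x3c.
 */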
  236. ret |= VC4_SET_FIELD((level >> 6),
  237. PV5_CONTROL_FIFO_LEVEL_HIGH);
  238. return ret | VC4_SET_FIELD(level & 0x3f,
  239. PV_CONTROL_FIFO_LEVEL);
  240. }
  241. /*
  242. * Returns the encoder attached to the CRTC.
  243. *
  244. * VC4 can only scan out to one encoder at a time, while the DRM core
  245. * allows drivers to push pixels to more than one encoder from the
  246. * same CRTC.
  247. */
  248. struct drm_encoder *vc4_get_crtc_encoder(struct drm_crtc *crtc,
  249. struct drm_crtc_state *state)
  250. {
  251. struct drm_encoder *encoder;
  252. WARN_ON(hweight32(state->encoder_mask) > 1);
  253. drm_for_each_encoder_mask(encoder, crtc->dev, state->encoder_mask)
  254. return encoder;
  255. return NULL;
  256. }
  257. static void vc4_crtc_pixelvalve_reset(struct drm_crtc *crtc)
  258. {
  259. struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
  260. struct drm_device *dev = crtc->dev;
  261. int idx;
  262. if (!drm_dev_enter(dev, &idx))
  263. return;
  264. /* The PV needs to be disabled before it can be flushed */
  265. CRTC_WRITE(PV_CONTROL, CRTC_READ(PV_CONTROL) & ~PV_CONTROL_EN);
  266. CRTC_WRITE(PV_CONTROL, CRTC_READ(PV_CONTROL) | PV_CONTROL_FIFO_CLR);
  267. drm_dev_exit(idx);
  268. }
  269. static void vc4_crtc_config_pv(struct drm_crtc *crtc, struct drm_encoder *encoder,
  270. struct drm_atomic_state *state)
  271. {
  272. struct drm_device *dev = crtc->dev;
  273. struct vc4_dev *vc4 = to_vc4_dev(dev);
  274. struct vc4_encoder *vc4_encoder = to_vc4_encoder(encoder);
  275. struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
  276. const struct vc4_pv_data *pv_data = vc4_crtc_to_vc4_pv_data(vc4_crtc);
  277. struct drm_crtc_state *crtc_state = crtc->state;
  278. struct drm_display_mode *mode = &crtc_state->adjusted_mode;
  279. bool interlace = mode->flags & DRM_MODE_FLAG_INTERLACE;
  280. bool is_hdmi = vc4_encoder->type == VC4_ENCODER_TYPE_HDMI0 ||
  281. vc4_encoder->type == VC4_ENCODER_TYPE_HDMI1;
  282. u32 pixel_rep = ((mode->flags & DRM_MODE_FLAG_DBLCLK) && !is_hdmi) ? 2 : 1;
  283. bool is_dsi = (vc4_encoder->type == VC4_ENCODER_TYPE_DSI0 ||
  284. vc4_encoder->type == VC4_ENCODER_TYPE_DSI1);
  285. bool is_dsi1 = vc4_encoder->type == VC4_ENCODER_TYPE_DSI1;
  286. u32 format = is_dsi1 ? PV_CONTROL_FORMAT_DSIV_24 : PV_CONTROL_FORMAT_24;
  287. u8 ppc = pv_data->pixels_per_clock;
  288. bool debug_dump_regs = false;
  289. int idx;
  290. if (!drm_dev_enter(dev, &idx))
  291. return;
  292. if (debug_dump_regs) {
  293. struct drm_printer p = drm_info_printer(&vc4_crtc->pdev->dev);
  294. dev_info(&vc4_crtc->pdev->dev, "CRTC %d regs before:\n",
  295. drm_crtc_index(crtc));
  296. drm_print_regset32(&p, &vc4_crtc->regset);
  297. }
  298. vc4_crtc_pixelvalve_reset(crtc);
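/* Worked example (illustrative, assuming a CEA 1080p60 mode with
 * hdisplay 1920, hsync_start 2008, hsync_end 2052, htotal 2200,
 * pixel_rep 1 and ppc 1): HBP = 148, HSYNC = 44, HFP = 88 and
 * HACTIVE = 1920 pixels.
 */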
  299. CRTC_WRITE(PV_HORZA,
  300. VC4_SET_FIELD((mode->htotal - mode->hsync_end) * pixel_rep / ppc,
  301. PV_HORZA_HBP) |
  302. VC4_SET_FIELD((mode->hsync_end - mode->hsync_start) * pixel_rep / ppc,
  303. PV_HORZA_HSYNC));
  304. CRTC_WRITE(PV_HORZB,
  305. VC4_SET_FIELD((mode->hsync_start - mode->hdisplay) * pixel_rep / ppc,
  306. PV_HORZB_HFP) |
  307. VC4_SET_FIELD(mode->hdisplay * pixel_rep / ppc,
  308. PV_HORZB_HACTIVE));
  309. CRTC_WRITE(PV_VERTA,
  310. VC4_SET_FIELD(mode->crtc_vtotal - mode->crtc_vsync_end +
  311. interlace,
  312. PV_VERTA_VBP) |
  313. VC4_SET_FIELD(mode->crtc_vsync_end - mode->crtc_vsync_start,
  314. PV_VERTA_VSYNC));
  315. CRTC_WRITE(PV_VERTB,
  316. VC4_SET_FIELD(mode->crtc_vsync_start - mode->crtc_vdisplay,
  317. PV_VERTB_VFP) |
  318. VC4_SET_FIELD(mode->crtc_vdisplay, PV_VERTB_VACTIVE));
  319. if (interlace) {
  320. CRTC_WRITE(PV_VERTA_EVEN,
  321. VC4_SET_FIELD(mode->crtc_vtotal -
  322. mode->crtc_vsync_end,
  323. PV_VERTA_VBP) |
  324. VC4_SET_FIELD(mode->crtc_vsync_end -
  325. mode->crtc_vsync_start,
  326. PV_VERTA_VSYNC));
  327. CRTC_WRITE(PV_VERTB_EVEN,
  328. VC4_SET_FIELD(mode->crtc_vsync_start -
  329. mode->crtc_vdisplay,
  330. PV_VERTB_VFP) |
  331. VC4_SET_FIELD(mode->crtc_vdisplay, PV_VERTB_VACTIVE));
  332. /* We set up first field even mode for HDMI. VEC's
  333. * NTSC mode would want first field odd instead, once
  334. * we support it (to do so, set ODD_FIRST and put the
  335. * delay in VSYNCD_EVEN instead).
  336. */
  337. CRTC_WRITE(PV_V_CONTROL,
  338. PV_VCONTROL_CONTINUOUS |
  339. (is_dsi ? PV_VCONTROL_DSI : 0) |
  340. PV_VCONTROL_INTERLACE |
  341. VC4_SET_FIELD(mode->htotal * pixel_rep / (2 * ppc),
  342. PV_VCONTROL_ODD_DELAY));
  343. CRTC_WRITE(PV_VSYNCD_EVEN, 0);
  344. } else {
  345. CRTC_WRITE(PV_V_CONTROL,
  346. PV_VCONTROL_CONTINUOUS |
  347. (is_dsi ? PV_VCONTROL_DSI : 0));
  348. }
  349. if (is_dsi)
  350. CRTC_WRITE(PV_HACT_ACT, mode->hdisplay * pixel_rep);
  351. if (vc4->is_vc5)
  352. CRTC_WRITE(PV_MUX_CFG,
  353. VC4_SET_FIELD(PV_MUX_CFG_RGB_PIXEL_MUX_MODE_NO_SWAP,
  354. PV_MUX_CFG_RGB_PIXEL_MUX_MODE));
  355. CRTC_WRITE(PV_CONTROL, PV_CONTROL_FIFO_CLR |
  356. vc4_crtc_get_fifo_full_level_bits(vc4_crtc, format) |
  357. VC4_SET_FIELD(format, PV_CONTROL_FORMAT) |
  358. VC4_SET_FIELD(pixel_rep - 1, PV_CONTROL_PIXEL_REP) |
  359. PV_CONTROL_CLR_AT_START |
  360. PV_CONTROL_TRIGGER_UNDERFLOW |
  361. PV_CONTROL_WAIT_HSTART |
  362. VC4_SET_FIELD(vc4_encoder->clock_select,
  363. PV_CONTROL_CLK_SELECT));
  364. if (debug_dump_regs) {
  365. struct drm_printer p = drm_info_printer(&vc4_crtc->pdev->dev);
  366. dev_info(&vc4_crtc->pdev->dev, "CRTC %d regs after:\n",
  367. drm_crtc_index(crtc));
  368. drm_print_regset32(&p, &vc4_crtc->regset);
  369. }
  370. drm_dev_exit(idx);
  371. }
  372. static void require_hvs_enabled(struct drm_device *dev)
  373. {
  374. struct vc4_dev *vc4 = to_vc4_dev(dev);
  375. struct vc4_hvs *hvs = vc4->hvs;
  376. WARN_ON_ONCE((HVS_READ(SCALER_DISPCTRL) & SCALER_DISPCTRL_ENABLE) !=
  377. SCALER_DISPCTRL_ENABLE);
  378. }
  379. static int vc4_crtc_disable(struct drm_crtc *crtc,
  380. struct drm_encoder *encoder,
  381. struct drm_atomic_state *state,
  382. unsigned int channel)
  383. {
  384. struct vc4_encoder *vc4_encoder = to_vc4_encoder(encoder);
  385. struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
  386. struct drm_device *dev = crtc->dev;
  387. struct vc4_dev *vc4 = to_vc4_dev(dev);
  388. int idx, ret;
  389. if (!drm_dev_enter(dev, &idx))
  390. return -ENODEV;
  391. CRTC_WRITE(PV_V_CONTROL,
  392. CRTC_READ(PV_V_CONTROL) & ~PV_VCONTROL_VIDEN);
  393. ret = wait_for(!(CRTC_READ(PV_V_CONTROL) & PV_VCONTROL_VIDEN), 1);
  394. WARN_ONCE(ret, "Timeout waiting for !PV_VCONTROL_VIDEN\n");
  395. /*
  396. * This delay is needed to avoid getting a pixel stuck in an
  397. * unflushable FIFO between the pixelvalve and the HDMI
  398. * controllers on the BCM2711.
  399. *
  400. * Timing is fairly sensitive here, so mdelay is the safest
  401. * approach.
  402. *
  403. * If this were to be reworked: the stuck pixel happens on the
  404. * BCM2711 when changing modes with good probability, so a
  405. * script that changes modes on a regular basis should trigger
  406. * the bug in fewer than 10 attempts. It manifests itself as
  407. * every pixel being shifted by one to the right, so the
  408. * last pixel of a line is actually displayed as the first
  409. * pixel of the next line.
  410. */
  411. mdelay(20);
  412. if (vc4_encoder && vc4_encoder->post_crtc_disable)
  413. vc4_encoder->post_crtc_disable(encoder, state);
  414. vc4_crtc_pixelvalve_reset(crtc);
  415. vc4_hvs_stop_channel(vc4->hvs, channel);
  416. if (vc4_encoder && vc4_encoder->post_crtc_powerdown)
  417. vc4_encoder->post_crtc_powerdown(encoder, state);
  418. drm_dev_exit(idx);
  419. return 0;
  420. }
  421. static struct drm_encoder *vc4_crtc_get_encoder_by_type(struct drm_crtc *crtc,
  422. enum vc4_encoder_type type)
  423. {
  424. struct drm_encoder *encoder;
  425. drm_for_each_encoder(encoder, crtc->dev) {
  426. struct vc4_encoder *vc4_encoder = to_vc4_encoder(encoder);
  427. if (vc4_encoder->type == type)
  428. return encoder;
  429. }
  430. return NULL;
  431. }
  432. int vc4_crtc_disable_at_boot(struct drm_crtc *crtc)
  433. {
  434. struct drm_device *drm = crtc->dev;
  435. struct vc4_dev *vc4 = to_vc4_dev(drm);
  436. struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
  437. enum vc4_encoder_type encoder_type;
  438. const struct vc4_pv_data *pv_data;
  439. struct drm_encoder *encoder;
  440. struct vc4_hdmi *vc4_hdmi;
  441. unsigned encoder_sel;
  442. int channel;
  443. int ret;
  444. if (!(of_device_is_compatible(vc4_crtc->pdev->dev.of_node,
  445. "brcm,bcm2711-pixelvalve2") ||
  446. of_device_is_compatible(vc4_crtc->pdev->dev.of_node,
  447. "brcm,bcm2711-pixelvalve4")))
  448. return 0;
  449. if (!(CRTC_READ(PV_CONTROL) & PV_CONTROL_EN))
  450. return 0;
  451. if (!(CRTC_READ(PV_V_CONTROL) & PV_VCONTROL_VIDEN))
  452. return 0;
  453. channel = vc4_hvs_get_fifo_from_output(vc4->hvs, vc4_crtc->data->hvs_output);
  454. if (channel < 0)
  455. return 0;
  456. encoder_sel = VC4_GET_FIELD(CRTC_READ(PV_CONTROL), PV_CONTROL_CLK_SELECT);
  457. if (WARN_ON(encoder_sel != 0))
  458. return 0;
  459. pv_data = vc4_crtc_to_vc4_pv_data(vc4_crtc);
  460. encoder_type = pv_data->encoder_types[encoder_sel];
  461. encoder = vc4_crtc_get_encoder_by_type(crtc, encoder_type);
  462. if (WARN_ON(!encoder))
  463. return 0;
  464. vc4_hdmi = encoder_to_vc4_hdmi(encoder);
  465. ret = pm_runtime_resume_and_get(&vc4_hdmi->pdev->dev);
  466. if (ret)
  467. return ret;
  468. ret = vc4_crtc_disable(crtc, encoder, NULL, channel);
  469. if (ret)
  470. return ret;
  471. /*
  472. * post_crtc_powerdown will have called pm_runtime_put, so we
  473. * must not call it again here, otherwise the reference counting
  474. * would be wrong.
  475. */
  476. return 0;
  477. }
  478. void vc4_crtc_send_vblank(struct drm_crtc *crtc)
  479. {
  480. struct drm_device *dev = crtc->dev;
  481. unsigned long flags;
  482. if (!crtc->state || !crtc->state->event)
  483. return;
  484. spin_lock_irqsave(&dev->event_lock, flags);
  485. drm_crtc_send_vblank_event(crtc, crtc->state->event);
  486. crtc->state->event = NULL;
  487. spin_unlock_irqrestore(&dev->event_lock, flags);
  488. }
  489. static void vc4_crtc_atomic_disable(struct drm_crtc *crtc,
  490. struct drm_atomic_state *state)
  491. {
  492. struct drm_crtc_state *old_state = drm_atomic_get_old_crtc_state(state,
  493. crtc);
  494. struct vc4_crtc_state *old_vc4_state = to_vc4_crtc_state(old_state);
  495. struct drm_encoder *encoder = vc4_get_crtc_encoder(crtc, old_state);
  496. struct drm_device *dev = crtc->dev;
  497. drm_dbg(dev, "Disabling CRTC %s (%u) connected to Encoder %s (%u)",
  498. crtc->name, crtc->base.id, encoder->name, encoder->base.id);
  499. require_hvs_enabled(dev);
  500. /* Disable vblank irq handling before crtc is disabled. */
  501. drm_crtc_vblank_off(crtc);
  502. vc4_crtc_disable(crtc, encoder, state, old_vc4_state->assigned_channel);
  503. /*
  504. * Make sure we issue a vblank event after disabling the CRTC if
  505. * someone was waiting on it.
  506. */
  507. vc4_crtc_send_vblank(crtc);
  508. }
  509. static void vc4_crtc_atomic_enable(struct drm_crtc *crtc,
  510. struct drm_atomic_state *state)
  511. {
  512. struct drm_crtc_state *new_state = drm_atomic_get_new_crtc_state(state,
  513. crtc);
  514. struct drm_device *dev = crtc->dev;
  515. struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
  516. struct drm_encoder *encoder = vc4_get_crtc_encoder(crtc, new_state);
  517. struct vc4_encoder *vc4_encoder = to_vc4_encoder(encoder);
  518. int idx;
  519. drm_dbg(dev, "Enabling CRTC %s (%u) connected to Encoder %s (%u)",
  520. crtc->name, crtc->base.id, encoder->name, encoder->base.id);
  521. if (!drm_dev_enter(dev, &idx))
  522. return;
  523. require_hvs_enabled(dev);
  524. /* Enable vblank irq handling before the crtc is started, otherwise
  525. * drm_crtc_vblank_get() fails in vc4_crtc_update_dlist().
  526. */
  527. drm_crtc_vblank_on(crtc);
  528. vc4_hvs_atomic_enable(crtc, state);
  529. if (vc4_encoder->pre_crtc_configure)
  530. vc4_encoder->pre_crtc_configure(encoder, state);
  531. vc4_crtc_config_pv(crtc, encoder, state);
  532. CRTC_WRITE(PV_CONTROL, CRTC_READ(PV_CONTROL) | PV_CONTROL_EN);
  533. if (vc4_encoder->pre_crtc_enable)
  534. vc4_encoder->pre_crtc_enable(encoder, state);
  535. /* When feeding the transposer block, the pixelvalve is unneeded and
  536. * should not be enabled.
  537. */
  538. CRTC_WRITE(PV_V_CONTROL,
  539. CRTC_READ(PV_V_CONTROL) | PV_VCONTROL_VIDEN);
  540. if (vc4_encoder->post_crtc_enable)
  541. vc4_encoder->post_crtc_enable(encoder, state);
  542. drm_dev_exit(idx);
  543. }
  544. static enum drm_mode_status vc4_crtc_mode_valid(struct drm_crtc *crtc,
  545. const struct drm_display_mode *mode)
  546. {
  547. /* Do not allow doublescan modes from user space */
  548. if (mode->flags & DRM_MODE_FLAG_DBLSCAN) {
  549. DRM_DEBUG_KMS("[CRTC:%d] Doublescan mode rejected.\n",
  550. crtc->base.id);
  551. return MODE_NO_DBLESCAN;
  552. }
  553. return MODE_OK;
  554. }
  555. void vc4_crtc_get_margins(struct drm_crtc_state *state,
  556. unsigned int *left, unsigned int *right,
  557. unsigned int *top, unsigned int *bottom)
  558. {
  559. struct vc4_crtc_state *vc4_state = to_vc4_crtc_state(state);
  560. struct drm_connector_state *conn_state;
  561. struct drm_connector *conn;
  562. int i;
  563. *left = vc4_state->margins.left;
  564. *right = vc4_state->margins.right;
  565. *top = vc4_state->margins.top;
  566. *bottom = vc4_state->margins.bottom;
  567. /* We have to iterate over all new connector states because
  568. * vc4_crtc_get_margins() might be called before
  569. * vc4_crtc_atomic_check() which means margins info in vc4_crtc_state
  570. * might be outdated.
  571. */
  572. for_each_new_connector_in_state(state->state, conn, conn_state, i) {
  573. if (conn_state->crtc != state->crtc)
  574. continue;
  575. *left = conn_state->tv.margins.left;
  576. *right = conn_state->tv.margins.right;
  577. *top = conn_state->tv.margins.top;
  578. *bottom = conn_state->tv.margins.bottom;
  579. break;
  580. }
  581. }
  582. static int vc4_crtc_atomic_check(struct drm_crtc *crtc,
  583. struct drm_atomic_state *state)
  584. {
  585. struct drm_crtc_state *crtc_state = drm_atomic_get_new_crtc_state(state,
  586. crtc);
  587. struct vc4_crtc_state *vc4_state = to_vc4_crtc_state(crtc_state);
  588. struct drm_connector *conn;
  589. struct drm_connector_state *conn_state;
  590. struct drm_encoder *encoder;
  591. int ret, i;
  592. ret = vc4_hvs_atomic_check(crtc, state);
  593. if (ret)
  594. return ret;
  595. encoder = vc4_get_crtc_encoder(crtc, crtc_state);
  596. if (encoder) {
  597. const struct drm_display_mode *mode = &crtc_state->adjusted_mode;
  598. struct vc4_encoder *vc4_encoder = to_vc4_encoder(encoder);
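/* Estimate the HVS load in Hz: for HDMI, scale the pixel clock by the
 * active/total horizontal ratio and add headroom (8 MHz), but never
 * drop below 90% of the pixel clock; for other encoders, use the full
 * pixel clock.
 */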
  599. if (vc4_encoder->type == VC4_ENCODER_TYPE_HDMI0) {
  600. vc4_state->hvs_load = max(mode->clock * mode->hdisplay / mode->htotal + 8000,
  601. mode->clock * 9 / 10) * 1000;
  602. } else {
  603. vc4_state->hvs_load = mode->clock * 1000;
  604. }
  605. }
  606. for_each_new_connector_in_state(state, conn, conn_state,
  607. i) {
  608. if (conn_state->crtc != crtc)
  609. continue;
  610. vc4_state->margins.left = conn_state->tv.margins.left;
  611. vc4_state->margins.right = conn_state->tv.margins.right;
  612. vc4_state->margins.top = conn_state->tv.margins.top;
  613. vc4_state->margins.bottom = conn_state->tv.margins.bottom;
  614. break;
  615. }
  616. return 0;
  617. }
  618. static int vc4_enable_vblank(struct drm_crtc *crtc)
  619. {
  620. struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
  621. struct drm_device *dev = crtc->dev;
  622. int idx;
  623. if (!drm_dev_enter(dev, &idx))
  624. return -ENODEV;
  625. CRTC_WRITE(PV_INTEN, PV_INT_VFP_START);
  626. drm_dev_exit(idx);
  627. return 0;
  628. }
  629. static void vc4_disable_vblank(struct drm_crtc *crtc)
  630. {
  631. struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
  632. struct drm_device *dev = crtc->dev;
  633. int idx;
  634. if (!drm_dev_enter(dev, &idx))
  635. return;
  636. CRTC_WRITE(PV_INTEN, 0);
  637. drm_dev_exit(idx);
  638. }
  639. static void vc4_crtc_handle_page_flip(struct vc4_crtc *vc4_crtc)
  640. {
  641. struct drm_crtc *crtc = &vc4_crtc->base;
  642. struct drm_device *dev = crtc->dev;
  643. struct vc4_dev *vc4 = to_vc4_dev(dev);
  644. struct vc4_hvs *hvs = vc4->hvs;
  645. u32 chan = vc4_crtc->current_hvs_channel;
  646. unsigned long flags;
  647. spin_lock_irqsave(&dev->event_lock, flags);
  648. spin_lock(&vc4_crtc->irq_lock);
  649. if (vc4_crtc->event &&
  650. (vc4_crtc->current_dlist == HVS_READ(SCALER_DISPLACTX(chan)) ||
  651. vc4_crtc->feeds_txp)) {
  652. drm_crtc_send_vblank_event(crtc, vc4_crtc->event);
  653. vc4_crtc->event = NULL;
  654. drm_crtc_vblank_put(crtc);
  655. /* Wait for the page flip to unmask the underrun to ensure that
  656. * the display list was updated by the hardware. Before that
  657. * happens, the HVS will be using the previous display list with
  658. * the CRTC and encoder already reconfigured, leading to
  659. * underruns. This can be seen when reconfiguring the CRTC.
  660. */
  661. vc4_hvs_unmask_underrun(hvs, chan);
  662. }
  663. spin_unlock(&vc4_crtc->irq_lock);
  664. spin_unlock_irqrestore(&dev->event_lock, flags);
  665. }
  666. void vc4_crtc_handle_vblank(struct vc4_crtc *crtc)
  667. {
  668. crtc->t_vblank = ktime_get();
  669. drm_crtc_handle_vblank(&crtc->base);
  670. vc4_crtc_handle_page_flip(crtc);
  671. }
  672. static irqreturn_t vc4_crtc_irq_handler(int irq, void *data)
  673. {
  674. struct vc4_crtc *vc4_crtc = data;
  675. u32 stat = CRTC_READ(PV_INTSTAT);
  676. irqreturn_t ret = IRQ_NONE;
  677. if (stat & PV_INT_VFP_START) {
  678. CRTC_WRITE(PV_INTSTAT, PV_INT_VFP_START);
  679. vc4_crtc_handle_vblank(vc4_crtc);
  680. ret = IRQ_HANDLED;
  681. }
  682. return ret;
  683. }
  684. struct vc4_async_flip_state {
  685. struct drm_crtc *crtc;
  686. struct drm_framebuffer *fb;
  687. struct drm_framebuffer *old_fb;
  688. struct drm_pending_vblank_event *event;
  689. union {
  690. struct dma_fence_cb fence;
  691. struct vc4_seqno_cb seqno;
  692. } cb;
  693. };
  694. /* Called when the V3D execution for the BO being flipped to is done, so that
  695. * we can actually update the plane's address to point to it.
  696. */
  697. static void
  698. vc4_async_page_flip_complete(struct vc4_async_flip_state *flip_state)
  699. {
  700. struct drm_crtc *crtc = flip_state->crtc;
  701. struct drm_device *dev = crtc->dev;
  702. struct drm_plane *plane = crtc->primary;
  703. vc4_plane_async_set_fb(plane, flip_state->fb);
  704. if (flip_state->event) {
  705. unsigned long flags;
  706. spin_lock_irqsave(&dev->event_lock, flags);
  707. drm_crtc_send_vblank_event(crtc, flip_state->event);
  708. spin_unlock_irqrestore(&dev->event_lock, flags);
  709. }
  710. drm_crtc_vblank_put(crtc);
  711. drm_framebuffer_put(flip_state->fb);
  712. if (flip_state->old_fb)
  713. drm_framebuffer_put(flip_state->old_fb);
  714. kfree(flip_state);
  715. }
  716. static void vc4_async_page_flip_seqno_complete(struct vc4_seqno_cb *cb)
  717. {
  718. struct vc4_async_flip_state *flip_state =
  719. container_of(cb, struct vc4_async_flip_state, cb.seqno);
  720. struct vc4_bo *bo = NULL;
  721. if (flip_state->old_fb) {
  722. struct drm_gem_dma_object *dma_bo =
  723. drm_fb_dma_get_gem_obj(flip_state->old_fb, 0);
  724. bo = to_vc4_bo(&dma_bo->base);
  725. }
  726. vc4_async_page_flip_complete(flip_state);
  727. /*
  728. * Decrement the BO usecnt in order to keep the inc/dec
  729. * calls balanced when the planes are updated through
  730. * the async update path.
  731. *
  732. * FIXME: we should move to generic async-page-flip when
  733. * it's available, so that we can get rid of this
  734. * hand-made cleanup_fb() logic.
  735. */
  736. if (bo)
  737. vc4_bo_dec_usecnt(bo);
  738. }
  739. static void vc4_async_page_flip_fence_complete(struct dma_fence *fence,
  740. struct dma_fence_cb *cb)
  741. {
  742. struct vc4_async_flip_state *flip_state =
  743. container_of(cb, struct vc4_async_flip_state, cb.fence);
  744. vc4_async_page_flip_complete(flip_state);
  745. dma_fence_put(fence);
  746. }
  747. static int vc4_async_set_fence_cb(struct drm_device *dev,
  748. struct vc4_async_flip_state *flip_state)
  749. {
  750. struct drm_framebuffer *fb = flip_state->fb;
  751. struct drm_gem_dma_object *dma_bo = drm_fb_dma_get_gem_obj(fb, 0);
  752. struct vc4_dev *vc4 = to_vc4_dev(dev);
  753. struct dma_fence *fence;
  754. int ret;
  755. if (!vc4->is_vc5) {
  756. struct vc4_bo *bo = to_vc4_bo(&dma_bo->base);
  757. return vc4_queue_seqno_cb(dev, &flip_state->cb.seqno, bo->seqno,
  758. vc4_async_page_flip_seqno_complete);
  759. }
  760. ret = dma_resv_get_singleton(dma_bo->base.resv, DMA_RESV_USAGE_READ, &fence);
  761. if (ret)
  762. return ret;
  763. /* If there's no fence, complete the page flip immediately */
  764. if (!fence) {
  765. vc4_async_page_flip_fence_complete(fence, &flip_state->cb.fence);
  766. return 0;
  767. }
  768. /* If the fence has already been completed, complete the page flip */
  769. if (dma_fence_add_callback(fence, &flip_state->cb.fence,
  770. vc4_async_page_flip_fence_complete))
  771. vc4_async_page_flip_fence_complete(fence, &flip_state->cb.fence);
  772. return 0;
  773. }
  774. static int
  775. vc4_async_page_flip_common(struct drm_crtc *crtc,
  776. struct drm_framebuffer *fb,
  777. struct drm_pending_vblank_event *event,
  778. uint32_t flags)
  779. {
  780. struct drm_device *dev = crtc->dev;
  781. struct drm_plane *plane = crtc->primary;
  782. struct vc4_async_flip_state *flip_state;
  783. flip_state = kzalloc(sizeof(*flip_state), GFP_KERNEL);
  784. if (!flip_state)
  785. return -ENOMEM;
  786. drm_framebuffer_get(fb);
  787. flip_state->fb = fb;
  788. flip_state->crtc = crtc;
  789. flip_state->event = event;
  790. /* Save the current FB before it's replaced by the new one in
  791. * drm_atomic_set_fb_for_plane(). We'll need the old FB in
  792. * vc4_async_page_flip_complete() to decrement the BO usecnt and keep
  793. * it consistent.
  794. * FIXME: we should move to generic async-page-flip when it's
  795. * available, so that we can get rid of this hand-made cleanup_fb()
  796. * logic.
  797. */
  798. flip_state->old_fb = plane->state->fb;
  799. if (flip_state->old_fb)
  800. drm_framebuffer_get(flip_state->old_fb);
  801. WARN_ON(drm_crtc_vblank_get(crtc) != 0);
  802. /* Immediately update the plane's legacy fb pointer, so that later
  803. * modeset prep sees the state that will be present when the semaphore
  804. * is released.
  805. */
  806. drm_atomic_set_fb_for_plane(plane->state, fb);
  807. vc4_async_set_fence_cb(dev, flip_state);
  808. /* Driver takes ownership of state on successful async commit. */
  809. return 0;
  810. }
  811. /* Implements async (non-vblank-synced) page flips.
  812. *
  813. * The page flip ioctl needs to return immediately, so we grab the
  814. * modeset semaphore on the pipe, and queue the address update for
  815. * when V3D is done with the BO being flipped to.
  816. */
  817. static int vc4_async_page_flip(struct drm_crtc *crtc,
  818. struct drm_framebuffer *fb,
  819. struct drm_pending_vblank_event *event,
  820. uint32_t flags)
  821. {
  822. struct drm_device *dev = crtc->dev;
  823. struct vc4_dev *vc4 = to_vc4_dev(dev);
  824. struct drm_gem_dma_object *dma_bo = drm_fb_dma_get_gem_obj(fb, 0);
  825. struct vc4_bo *bo = to_vc4_bo(&dma_bo->base);
  826. int ret;
  827. if (WARN_ON_ONCE(vc4->is_vc5))
  828. return -ENODEV;
  829. /*
  830. * Increment the BO usecnt here, so that we never end up with an
  831. * unbalanced number of vc4_bo_{dec,inc}_usecnt() calls when the
  832. * plane is later updated through the non-async path.
  833. *
  834. * FIXME: we should move to generic async-page-flip when
  835. * it's available, so that we can get rid of this
  836. * hand-made prepare_fb() logic.
  837. */
  838. ret = vc4_bo_inc_usecnt(bo);
  839. if (ret)
  840. return ret;
  841. ret = vc4_async_page_flip_common(crtc, fb, event, flags);
  842. if (ret) {
  843. vc4_bo_dec_usecnt(bo);
  844. return ret;
  845. }
  846. return 0;
  847. }
  848. static int vc5_async_page_flip(struct drm_crtc *crtc,
  849. struct drm_framebuffer *fb,
  850. struct drm_pending_vblank_event *event,
  851. uint32_t flags)
  852. {
  853. return vc4_async_page_flip_common(crtc, fb, event, flags);
  854. }
  855. int vc4_page_flip(struct drm_crtc *crtc,
  856. struct drm_framebuffer *fb,
  857. struct drm_pending_vblank_event *event,
  858. uint32_t flags,
  859. struct drm_modeset_acquire_ctx *ctx)
  860. {
  861. if (flags & DRM_MODE_PAGE_FLIP_ASYNC) {
  862. struct drm_device *dev = crtc->dev;
  863. struct vc4_dev *vc4 = to_vc4_dev(dev);
  864. if (vc4->is_vc5)
  865. return vc5_async_page_flip(crtc, fb, event, flags);
  866. else
  867. return vc4_async_page_flip(crtc, fb, event, flags);
  868. } else {
  869. return drm_atomic_helper_page_flip(crtc, fb, event, flags, ctx);
  870. }
  871. }
  872. struct drm_crtc_state *vc4_crtc_duplicate_state(struct drm_crtc *crtc)
  873. {
  874. struct vc4_crtc_state *vc4_state, *old_vc4_state;
  875. vc4_state = kzalloc(sizeof(*vc4_state), GFP_KERNEL);
  876. if (!vc4_state)
  877. return NULL;
  878. old_vc4_state = to_vc4_crtc_state(crtc->state);
  879. vc4_state->margins = old_vc4_state->margins;
  880. vc4_state->assigned_channel = old_vc4_state->assigned_channel;
  881. __drm_atomic_helper_crtc_duplicate_state(crtc, &vc4_state->base);
  882. return &vc4_state->base;
  883. }
  884. void vc4_crtc_destroy_state(struct drm_crtc *crtc,
  885. struct drm_crtc_state *state)
  886. {
  887. struct vc4_dev *vc4 = to_vc4_dev(crtc->dev);
  888. struct vc4_crtc_state *vc4_state = to_vc4_crtc_state(state);
  889. if (drm_mm_node_allocated(&vc4_state->mm)) {
  890. unsigned long flags;
  891. spin_lock_irqsave(&vc4->hvs->mm_lock, flags);
  892. drm_mm_remove_node(&vc4_state->mm);
  893. spin_unlock_irqrestore(&vc4->hvs->mm_lock, flags);
  894. }
  895. drm_atomic_helper_crtc_destroy_state(crtc, state);
  896. }
  897. void vc4_crtc_reset(struct drm_crtc *crtc)
  898. {
  899. struct vc4_crtc_state *vc4_crtc_state;
  900. if (crtc->state)
  901. vc4_crtc_destroy_state(crtc, crtc->state);
  902. vc4_crtc_state = kzalloc(sizeof(*vc4_crtc_state), GFP_KERNEL);
  903. if (!vc4_crtc_state) {
  904. crtc->state = NULL;
  905. return;
  906. }
  907. vc4_crtc_state->assigned_channel = VC4_HVS_CHANNEL_DISABLED;
  908. __drm_atomic_helper_crtc_reset(crtc, &vc4_crtc_state->base);
  909. }
  910. int vc4_crtc_late_register(struct drm_crtc *crtc)
  911. {
  912. struct drm_device *drm = crtc->dev;
  913. struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
  914. const struct vc4_crtc_data *crtc_data = vc4_crtc_to_vc4_crtc_data(vc4_crtc);
  915. int ret;
  916. ret = vc4_debugfs_add_regset32(drm->primary, crtc_data->debugfs_name,
  917. &vc4_crtc->regset);
  918. if (ret)
  919. return ret;
  920. return 0;
  921. }
  922. static const struct drm_crtc_funcs vc4_crtc_funcs = {
  923. .set_config = drm_atomic_helper_set_config,
  924. .page_flip = vc4_page_flip,
  925. .set_property = NULL,
  926. .cursor_set = NULL, /* handled by drm_mode_cursor_universal */
  927. .cursor_move = NULL, /* handled by drm_mode_cursor_universal */
  928. .reset = vc4_crtc_reset,
  929. .atomic_duplicate_state = vc4_crtc_duplicate_state,
  930. .atomic_destroy_state = vc4_crtc_destroy_state,
  931. .enable_vblank = vc4_enable_vblank,
  932. .disable_vblank = vc4_disable_vblank,
  933. .get_vblank_timestamp = drm_crtc_vblank_helper_get_vblank_timestamp,
  934. .late_register = vc4_crtc_late_register,
  935. };
  936. static const struct drm_crtc_helper_funcs vc4_crtc_helper_funcs = {
  937. .mode_valid = vc4_crtc_mode_valid,
  938. .atomic_check = vc4_crtc_atomic_check,
  939. .atomic_begin = vc4_hvs_atomic_begin,
  940. .atomic_flush = vc4_hvs_atomic_flush,
  941. .atomic_enable = vc4_crtc_atomic_enable,
  942. .atomic_disable = vc4_crtc_atomic_disable,
  943. .get_scanout_position = vc4_crtc_get_scanout_position,
  944. };
  945. static const struct vc4_pv_data bcm2835_pv0_data = {
  946. .base = {
  947. .debugfs_name = "crtc0_regs",
  948. .hvs_available_channels = BIT(0),
  949. .hvs_output = 0,
  950. },
  951. .fifo_depth = 64,
  952. .pixels_per_clock = 1,
  953. .encoder_types = {
  954. [PV_CONTROL_CLK_SELECT_DSI] = VC4_ENCODER_TYPE_DSI0,
  955. [PV_CONTROL_CLK_SELECT_DPI_SMI_HDMI] = VC4_ENCODER_TYPE_DPI,
  956. },
  957. };
  958. static const struct vc4_pv_data bcm2835_pv1_data = {
  959. .base = {
  960. .debugfs_name = "crtc1_regs",
  961. .hvs_available_channels = BIT(2),
  962. .hvs_output = 2,
  963. },
  964. .fifo_depth = 64,
  965. .pixels_per_clock = 1,
  966. .encoder_types = {
  967. [PV_CONTROL_CLK_SELECT_DSI] = VC4_ENCODER_TYPE_DSI1,
  968. [PV_CONTROL_CLK_SELECT_DPI_SMI_HDMI] = VC4_ENCODER_TYPE_SMI,
  969. },
  970. };
  971. static const struct vc4_pv_data bcm2835_pv2_data = {
  972. .base = {
  973. .debugfs_name = "crtc2_regs",
  974. .hvs_available_channels = BIT(1),
  975. .hvs_output = 1,
  976. },
  977. .fifo_depth = 64,
  978. .pixels_per_clock = 1,
  979. .encoder_types = {
  980. [PV_CONTROL_CLK_SELECT_DPI_SMI_HDMI] = VC4_ENCODER_TYPE_HDMI0,
  981. [PV_CONTROL_CLK_SELECT_VEC] = VC4_ENCODER_TYPE_VEC,
  982. },
  983. };
  984. static const struct vc4_pv_data bcm2711_pv0_data = {
  985. .base = {
  986. .debugfs_name = "crtc0_regs",
  987. .hvs_available_channels = BIT(0),
  988. .hvs_output = 0,
  989. },
  990. .fifo_depth = 64,
  991. .pixels_per_clock = 1,
  992. .encoder_types = {
  993. [0] = VC4_ENCODER_TYPE_DSI0,
  994. [1] = VC4_ENCODER_TYPE_DPI,
  995. },
  996. };
  997. static const struct vc4_pv_data bcm2711_pv1_data = {
  998. .base = {
  999. .debugfs_name = "crtc1_regs",
  1000. .hvs_available_channels = BIT(0) | BIT(1) | BIT(2),
  1001. .hvs_output = 3,
  1002. },
  1003. .fifo_depth = 64,
  1004. .pixels_per_clock = 1,
  1005. .encoder_types = {
  1006. [0] = VC4_ENCODER_TYPE_DSI1,
  1007. [1] = VC4_ENCODER_TYPE_SMI,
  1008. },
  1009. };
  1010. static const struct vc4_pv_data bcm2711_pv2_data = {
  1011. .base = {
  1012. .debugfs_name = "crtc2_regs",
  1013. .hvs_available_channels = BIT(0) | BIT(1) | BIT(2),
  1014. .hvs_output = 4,
  1015. },
  1016. .fifo_depth = 256,
  1017. .pixels_per_clock = 2,
  1018. .encoder_types = {
  1019. [0] = VC4_ENCODER_TYPE_HDMI0,
  1020. },
  1021. };
  1022. static const struct vc4_pv_data bcm2711_pv3_data = {
  1023. .base = {
  1024. .debugfs_name = "crtc3_regs",
  1025. .hvs_available_channels = BIT(1),
  1026. .hvs_output = 1,
  1027. },
  1028. .fifo_depth = 64,
  1029. .pixels_per_clock = 1,
  1030. .encoder_types = {
  1031. [PV_CONTROL_CLK_SELECT_VEC] = VC4_ENCODER_TYPE_VEC,
  1032. },
  1033. };
  1034. static const struct vc4_pv_data bcm2711_pv4_data = {
  1035. .base = {
  1036. .debugfs_name = "crtc4_regs",
  1037. .hvs_available_channels = BIT(0) | BIT(1) | BIT(2),
  1038. .hvs_output = 5,
  1039. },
  1040. .fifo_depth = 64,
  1041. .pixels_per_clock = 2,
  1042. .encoder_types = {
  1043. [0] = VC4_ENCODER_TYPE_HDMI1,
  1044. },
  1045. };
  1046. static const struct of_device_id vc4_crtc_dt_match[] = {
  1047. { .compatible = "brcm,bcm2835-pixelvalve0", .data = &bcm2835_pv0_data },
  1048. { .compatible = "brcm,bcm2835-pixelvalve1", .data = &bcm2835_pv1_data },
  1049. { .compatible = "brcm,bcm2835-pixelvalve2", .data = &bcm2835_pv2_data },
  1050. { .compatible = "brcm,bcm2711-pixelvalve0", .data = &bcm2711_pv0_data },
  1051. { .compatible = "brcm,bcm2711-pixelvalve1", .data = &bcm2711_pv1_data },
  1052. { .compatible = "brcm,bcm2711-pixelvalve2", .data = &bcm2711_pv2_data },
  1053. { .compatible = "brcm,bcm2711-pixelvalve3", .data = &bcm2711_pv3_data },
  1054. { .compatible = "brcm,bcm2711-pixelvalve4", .data = &bcm2711_pv4_data },
  1055. {}
  1056. };
  1057. static void vc4_set_crtc_possible_masks(struct drm_device *drm,
  1058. struct drm_crtc *crtc)
  1059. {
  1060. struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
  1061. const struct vc4_pv_data *pv_data = vc4_crtc_to_vc4_pv_data(vc4_crtc);
  1062. const enum vc4_encoder_type *encoder_types = pv_data->encoder_types;
  1063. struct drm_encoder *encoder;
  1064. drm_for_each_encoder(encoder, drm) {
  1065. struct vc4_encoder *vc4_encoder;
  1066. int i;
  1067. if (encoder->encoder_type == DRM_MODE_ENCODER_VIRTUAL)
  1068. continue;
  1069. vc4_encoder = to_vc4_encoder(encoder);
  1070. for (i = 0; i < ARRAY_SIZE(pv_data->encoder_types); i++) {
  1071. if (vc4_encoder->type == encoder_types[i]) {
  1072. vc4_encoder->clock_select = i;
  1073. encoder->possible_crtcs |= drm_crtc_mask(crtc);
  1074. break;
  1075. }
  1076. }
  1077. }
  1078. }
  1079. int vc4_crtc_init(struct drm_device *drm, struct vc4_crtc *vc4_crtc,
  1080. const struct drm_crtc_funcs *crtc_funcs,
  1081. const struct drm_crtc_helper_funcs *crtc_helper_funcs)
  1082. {
  1083. struct vc4_dev *vc4 = to_vc4_dev(drm);
  1084. struct drm_crtc *crtc = &vc4_crtc->base;
  1085. struct drm_plane *primary_plane;
  1086. unsigned int i;
  1087. int ret;
  1088. /* For now, we create just the primary and the legacy cursor
  1089. * planes. We should be able to stack more planes on easily,
  1090. * but to do that we would need to compute the bandwidth
  1091. * requirement of the plane configuration, and reject ones
  1092. * that will take too much.
  1093. */
  1094. primary_plane = vc4_plane_init(drm, DRM_PLANE_TYPE_PRIMARY, 0);
  1095. if (IS_ERR(primary_plane)) {
  1096. dev_err(drm->dev, "failed to construct primary plane\n");
  1097. return PTR_ERR(primary_plane);
  1098. }
  1099. spin_lock_init(&vc4_crtc->irq_lock);
  1100. ret = drmm_crtc_init_with_planes(drm, crtc, primary_plane, NULL,
  1101. crtc_funcs, NULL);
  1102. if (ret)
  1103. return ret;
  1104. drm_crtc_helper_add(crtc, crtc_helper_funcs);
  1105. if (!vc4->is_vc5) {
  1106. drm_mode_crtc_set_gamma_size(crtc, ARRAY_SIZE(vc4_crtc->lut_r));
  1107. drm_crtc_enable_color_mgmt(crtc, 0, false, crtc->gamma_size);
  1108. /* We support CTM, but only for one CRTC at a time. It's therefore
  1109. * implemented as private driver state in vc4_kms, not here.
  1110. */
  1111. drm_crtc_enable_color_mgmt(crtc, 0, true, crtc->gamma_size);
  1112. }
  1113. for (i = 0; i < crtc->gamma_size; i++) {
  1114. vc4_crtc->lut_r[i] = i;
  1115. vc4_crtc->lut_g[i] = i;
  1116. vc4_crtc->lut_b[i] = i;
  1117. }
  1118. return 0;
  1119. }
  1120. static int vc4_crtc_bind(struct device *dev, struct device *master, void *data)
  1121. {
  1122. struct platform_device *pdev = to_platform_device(dev);
  1123. struct drm_device *drm = dev_get_drvdata(master);
  1124. const struct vc4_pv_data *pv_data;
  1125. struct vc4_crtc *vc4_crtc;
  1126. struct drm_crtc *crtc;
  1127. int ret;
  1128. vc4_crtc = drmm_kzalloc(drm, sizeof(*vc4_crtc), GFP_KERNEL);
  1129. if (!vc4_crtc)
  1130. return -ENOMEM;
  1131. crtc = &vc4_crtc->base;
  1132. pv_data = of_device_get_match_data(dev);
  1133. if (!pv_data)
  1134. return -ENODEV;
  1135. vc4_crtc->data = &pv_data->base;
  1136. vc4_crtc->pdev = pdev;
  1137. vc4_crtc->regs = vc4_ioremap_regs(pdev, 0);
  1138. if (IS_ERR(vc4_crtc->regs))
  1139. return PTR_ERR(vc4_crtc->regs);
  1140. vc4_crtc->regset.base = vc4_crtc->regs;
  1141. vc4_crtc->regset.regs = crtc_regs;
  1142. vc4_crtc->regset.nregs = ARRAY_SIZE(crtc_regs);
  1143. ret = vc4_crtc_init(drm, vc4_crtc,
  1144. &vc4_crtc_funcs, &vc4_crtc_helper_funcs);
  1145. if (ret)
  1146. return ret;
  1147. vc4_set_crtc_possible_masks(drm, crtc);
  1148. CRTC_WRITE(PV_INTEN, 0);
  1149. CRTC_WRITE(PV_INTSTAT, PV_INT_VFP_START);
  1150. ret = devm_request_irq(dev, platform_get_irq(pdev, 0),
  1151. vc4_crtc_irq_handler,
  1152. IRQF_SHARED,
  1153. "vc4 crtc", vc4_crtc);
  1154. if (ret)
  1155. return ret;
  1156. platform_set_drvdata(pdev, vc4_crtc);
  1157. return 0;
  1158. }
  1159. static void vc4_crtc_unbind(struct device *dev, struct device *master,
  1160. void *data)
  1161. {
  1162. struct platform_device *pdev = to_platform_device(dev);
  1163. struct vc4_crtc *vc4_crtc = dev_get_drvdata(dev);
  1164. CRTC_WRITE(PV_INTEN, 0);
  1165. platform_set_drvdata(pdev, NULL);
  1166. }
  1167. static const struct component_ops vc4_crtc_ops = {
  1168. .bind = vc4_crtc_bind,
  1169. .unbind = vc4_crtc_unbind,
  1170. };
  1171. static int vc4_crtc_dev_probe(struct platform_device *pdev)
  1172. {
  1173. return component_add(&pdev->dev, &vc4_crtc_ops);
  1174. }
  1175. static int vc4_crtc_dev_remove(struct platform_device *pdev)
  1176. {
  1177. component_del(&pdev->dev, &vc4_crtc_ops);
  1178. return 0;
  1179. }
  1180. struct platform_driver vc4_crtc_driver = {
  1181. .probe = vc4_crtc_dev_probe,
  1182. .remove = vc4_crtc_dev_remove,
  1183. .driver = {
  1184. .name = "vc4_crtc",
  1185. .of_match_table = vc4_crtc_dt_match,
  1186. },
  1187. };