exynos_mixer.c

// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2011 Samsung Electronics Co.Ltd
 * Authors:
 * Seung-Woo Kim <[email protected]>
 * Inki Dae <[email protected]>
 * Joonyoung Shim <[email protected]>
 *
 * Based on drivers/media/video/s5p-tv/mixer_reg.c
 */

#include <linux/clk.h>
#include <linux/component.h>
#include <linux/delay.h>
#include <linux/i2c.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/kernel.h>
#include <linux/ktime.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/regulator/consumer.h>
#include <linux/spinlock.h>
#include <linux/wait.h>

#include <drm/drm_blend.h>
#include <drm/drm_edid.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_vblank.h>
#include <drm/exynos_drm.h>

#include "exynos_drm_crtc.h"
#include "exynos_drm_drv.h"
#include "exynos_drm_fb.h"
#include "exynos_drm_plane.h"
#include "regs-mixer.h"
#include "regs-vp.h"

#define MIXER_WIN_NR 3
#define VP_DEFAULT_WIN 2

/*
 * Mixer color space conversion coefficient triplet.
 * Used for CSC from RGB to YCbCr.
 * Each coefficient is a 10-bit fixed point number with
 * sign and no integer part, i.e.
 * [0:8] = fractional part (representing a value y = x / 2^9)
 * [9] = sign
 * Negative values are encoded with two's complement.
 */
#define MXR_CSC_C(x) ((int)((x) * 512.0) & 0x3ff)
#define MXR_CSC_CT(a0, a1, a2) \
	((MXR_CSC_C(a0) << 20) | (MXR_CSC_C(a1) << 10) | (MXR_CSC_C(a2) << 0))
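/*
 * Worked example of the encoding above (illustration only): MXR_CSC_C(0.614)
 * is (int)(0.614 * 512) & 0x3ff = 314 = 0x13a, while MXR_CSC_C(-0.102) is
 * (int)(-52.224) & 0x3ff = -52 & 0x3ff = 0x3cc, the 10-bit two's complement
 * of 52. MXR_CSC_CT() packs the three coefficients into bits [29:20],
 * [19:10] and [9:0] of a single coefficient register.
 */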
/* YCbCr value, used for mixer background color configuration. */
#define MXR_YCBCR_VAL(y, cb, cr) (((y) << 16) | ((cb) << 8) | ((cr) << 0))

/* The pixelformats that are natively supported by the mixer. */
#define MXR_FORMAT_RGB565 4
#define MXR_FORMAT_ARGB1555 5
#define MXR_FORMAT_ARGB4444 6
#define MXR_FORMAT_ARGB8888 7

enum mixer_version_id {
	MXR_VER_0_0_0_16,
	MXR_VER_16_0_33_0,
	MXR_VER_128_0_0_184,
};

enum mixer_flag_bits {
	MXR_BIT_POWERED,
	MXR_BIT_VSYNC,
	MXR_BIT_INTERLACE,
	MXR_BIT_VP_ENABLED,
	MXR_BIT_HAS_SCLK,
};

static const uint32_t mixer_formats[] = {
	DRM_FORMAT_XRGB4444,
	DRM_FORMAT_ARGB4444,
	DRM_FORMAT_XRGB1555,
	DRM_FORMAT_ARGB1555,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_ARGB8888,
};

static const uint32_t vp_formats[] = {
	DRM_FORMAT_NV12,
	DRM_FORMAT_NV21,
};

struct mixer_context {
	struct platform_device *pdev;
	struct device *dev;
	struct drm_device *drm_dev;
	void *dma_priv;
	struct exynos_drm_crtc *crtc;
	struct exynos_drm_plane planes[MIXER_WIN_NR];
	unsigned long flags;

	int irq;
	void __iomem *mixer_regs;
	void __iomem *vp_regs;
	spinlock_t reg_slock;
	struct clk *mixer;
	struct clk *vp;
	struct clk *hdmi;
	struct clk *sclk_mixer;
	struct clk *sclk_hdmi;
	struct clk *mout_mixer;
	enum mixer_version_id mxr_ver;
	int scan_value;
};

struct mixer_drv_data {
	enum mixer_version_id version;
	bool is_vp_enabled;
	bool has_sclk;
};
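/*
 * Static window layout used by this driver: window 0 is the primary
 * graphics plane and window 1 the cursor plane, both limited to the RGB
 * formats in mixer_formats[]; window 2 (VP_DEFAULT_WIN) is the video
 * processor plane, which takes NV12/NV21 and is the only one that can
 * scale.
 */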
static const struct exynos_drm_plane_config plane_configs[MIXER_WIN_NR] = {
	{
		.zpos = 0,
		.type = DRM_PLANE_TYPE_PRIMARY,
		.pixel_formats = mixer_formats,
		.num_pixel_formats = ARRAY_SIZE(mixer_formats),
		.capabilities = EXYNOS_DRM_PLANE_CAP_DOUBLE |
				EXYNOS_DRM_PLANE_CAP_ZPOS |
				EXYNOS_DRM_PLANE_CAP_PIX_BLEND |
				EXYNOS_DRM_PLANE_CAP_WIN_BLEND,
	}, {
		.zpos = 1,
		.type = DRM_PLANE_TYPE_CURSOR,
		.pixel_formats = mixer_formats,
		.num_pixel_formats = ARRAY_SIZE(mixer_formats),
		.capabilities = EXYNOS_DRM_PLANE_CAP_DOUBLE |
				EXYNOS_DRM_PLANE_CAP_ZPOS |
				EXYNOS_DRM_PLANE_CAP_PIX_BLEND |
				EXYNOS_DRM_PLANE_CAP_WIN_BLEND,
	}, {
		.zpos = 2,
		.type = DRM_PLANE_TYPE_OVERLAY,
		.pixel_formats = vp_formats,
		.num_pixel_formats = ARRAY_SIZE(vp_formats),
		.capabilities = EXYNOS_DRM_PLANE_CAP_SCALE |
				EXYNOS_DRM_PLANE_CAP_ZPOS |
				EXYNOS_DRM_PLANE_CAP_TILE |
				EXYNOS_DRM_PLANE_CAP_WIN_BLEND,
	},
};
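/*
 * The tables below appear to be the polyphase scaler coefficients for the
 * video processor: an 8-tap horizontal luma filter, a 4-tap vertical luma
 * filter and a 4-tap chroma filter. vp_default_filter() loads them into
 * the VP_POLY* register banks, four bytes per register write.
 */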
static const u8 filter_y_horiz_tap8[] = {
	0, -1, -1, -1, -1, -1, -1, -1,
	-1, -1, -1, -1, -1, 0, 0, 0,
	0, 2, 4, 5, 6, 6, 6, 6,
	6, 5, 5, 4, 3, 2, 1, 1,
	0, -6, -12, -16, -18, -20, -21, -20,
	-20, -18, -16, -13, -10, -8, -5, -2,
	127, 126, 125, 121, 114, 107, 99, 89,
	79, 68, 57, 46, 35, 25, 16, 8,
};

static const u8 filter_y_vert_tap4[] = {
	0, -3, -6, -8, -8, -8, -8, -7,
	-6, -5, -4, -3, -2, -1, -1, 0,
	127, 126, 124, 118, 111, 102, 92, 81,
	70, 59, 48, 37, 27, 19, 11, 5,
	0, 5, 11, 19, 27, 37, 48, 59,
	70, 81, 92, 102, 111, 118, 124, 126,
	0, 0, -1, -1, -2, -3, -4, -5,
	-6, -7, -8, -8, -8, -8, -6, -3,
};

static const u8 filter_cr_horiz_tap4[] = {
	0, -3, -6, -8, -8, -8, -8, -7,
	-6, -5, -4, -3, -2, -1, -1, 0,
	127, 126, 124, 118, 111, 102, 92, 81,
	70, 59, 48, 37, 27, 19, 11, 5,
};

static inline u32 vp_reg_read(struct mixer_context *ctx, u32 reg_id)
{
	return readl(ctx->vp_regs + reg_id);
}

static inline void vp_reg_write(struct mixer_context *ctx, u32 reg_id,
				u32 val)
{
	writel(val, ctx->vp_regs + reg_id);
}

static inline void vp_reg_writemask(struct mixer_context *ctx, u32 reg_id,
				    u32 val, u32 mask)
{
	u32 old = vp_reg_read(ctx, reg_id);

	val = (val & mask) | (old & ~mask);
	writel(val, ctx->vp_regs + reg_id);
}

static inline u32 mixer_reg_read(struct mixer_context *ctx, u32 reg_id)
{
	return readl(ctx->mixer_regs + reg_id);
}

static inline void mixer_reg_write(struct mixer_context *ctx, u32 reg_id,
				   u32 val)
{
	writel(val, ctx->mixer_regs + reg_id);
}

static inline void mixer_reg_writemask(struct mixer_context *ctx,
				       u32 reg_id, u32 val, u32 mask)
{
	u32 old = mixer_reg_read(ctx, reg_id);

	val = (val & mask) | (old & ~mask);
	writel(val, ctx->mixer_regs + reg_id);
}

static void mixer_regs_dump(struct mixer_context *ctx)
{
#define DUMPREG(reg_id) \
do { \
	DRM_DEV_DEBUG_KMS(ctx->dev, #reg_id " = %08x\n", \
			  (u32)readl(ctx->mixer_regs + reg_id)); \
} while (0)

	DUMPREG(MXR_STATUS);
	DUMPREG(MXR_CFG);
	DUMPREG(MXR_INT_EN);
	DUMPREG(MXR_INT_STATUS);

	DUMPREG(MXR_LAYER_CFG);
	DUMPREG(MXR_VIDEO_CFG);

	DUMPREG(MXR_GRAPHIC0_CFG);
	DUMPREG(MXR_GRAPHIC0_BASE);
	DUMPREG(MXR_GRAPHIC0_SPAN);
	DUMPREG(MXR_GRAPHIC0_WH);
	DUMPREG(MXR_GRAPHIC0_SXY);
	DUMPREG(MXR_GRAPHIC0_DXY);

	DUMPREG(MXR_GRAPHIC1_CFG);
	DUMPREG(MXR_GRAPHIC1_BASE);
	DUMPREG(MXR_GRAPHIC1_SPAN);
	DUMPREG(MXR_GRAPHIC1_WH);
	DUMPREG(MXR_GRAPHIC1_SXY);
	DUMPREG(MXR_GRAPHIC1_DXY);

#undef DUMPREG
}

static void vp_regs_dump(struct mixer_context *ctx)
{
#define DUMPREG(reg_id) \
do { \
	DRM_DEV_DEBUG_KMS(ctx->dev, #reg_id " = %08x\n", \
			  (u32) readl(ctx->vp_regs + reg_id)); \
} while (0)

	DUMPREG(VP_ENABLE);
	DUMPREG(VP_SRESET);
	DUMPREG(VP_SHADOW_UPDATE);
	DUMPREG(VP_FIELD_ID);
	DUMPREG(VP_MODE);
	DUMPREG(VP_IMG_SIZE_Y);
	DUMPREG(VP_IMG_SIZE_C);
	DUMPREG(VP_PER_RATE_CTRL);
	DUMPREG(VP_TOP_Y_PTR);
	DUMPREG(VP_BOT_Y_PTR);
	DUMPREG(VP_TOP_C_PTR);
	DUMPREG(VP_BOT_C_PTR);
	DUMPREG(VP_ENDIAN_MODE);
	DUMPREG(VP_SRC_H_POSITION);
	DUMPREG(VP_SRC_V_POSITION);
	DUMPREG(VP_SRC_WIDTH);
	DUMPREG(VP_SRC_HEIGHT);
	DUMPREG(VP_DST_H_POSITION);
	DUMPREG(VP_DST_V_POSITION);
	DUMPREG(VP_DST_WIDTH);
	DUMPREG(VP_DST_HEIGHT);
	DUMPREG(VP_H_RATIO);
	DUMPREG(VP_V_RATIO);

#undef DUMPREG
}

static inline void vp_filter_set(struct mixer_context *ctx,
				 int reg_id, const u8 *data, unsigned int size)
{
	/* assure 4-byte align */
	BUG_ON(size & 3);
	for (; size; size -= 4, reg_id += 4, data += 4) {
		u32 val = (data[0] << 24) | (data[1] << 16) |
			(data[2] << 8) | data[3];
		vp_reg_write(ctx, reg_id, val);
	}
}

static void vp_default_filter(struct mixer_context *ctx)
{
	vp_filter_set(ctx, VP_POLY8_Y0_LL,
		      filter_y_horiz_tap8, sizeof(filter_y_horiz_tap8));
	vp_filter_set(ctx, VP_POLY4_Y0_LL,
		      filter_y_vert_tap4, sizeof(filter_y_vert_tap4));
	vp_filter_set(ctx, VP_POLY4_C0_LL,
		      filter_cr_horiz_tap4, sizeof(filter_cr_horiz_tap4));
}

static void mixer_cfg_gfx_blend(struct mixer_context *ctx, unsigned int win,
				unsigned int pixel_alpha, unsigned int alpha)
{
	u32 win_alpha = alpha >> 8;
	u32 val;

	val = MXR_GRP_CFG_COLOR_KEY_DISABLE; /* no blank key */
	switch (pixel_alpha) {
	case DRM_MODE_BLEND_PIXEL_NONE:
		break;
	case DRM_MODE_BLEND_COVERAGE:
		val |= MXR_GRP_CFG_PIXEL_BLEND_EN;
		break;
	case DRM_MODE_BLEND_PREMULTI:
	default:
		val |= MXR_GRP_CFG_BLEND_PRE_MUL;
		val |= MXR_GRP_CFG_PIXEL_BLEND_EN;
		break;
	}

	if (alpha != DRM_BLEND_ALPHA_OPAQUE) {
		val |= MXR_GRP_CFG_WIN_BLEND_EN;
		val |= win_alpha;
	}
	mixer_reg_writemask(ctx, MXR_GRAPHIC_CFG(win),
			    val, MXR_GRP_CFG_MISC_MASK);
}

static void mixer_cfg_vp_blend(struct mixer_context *ctx, unsigned int alpha)
{
	u32 win_alpha = alpha >> 8;
	u32 val = 0;

	if (alpha != DRM_BLEND_ALPHA_OPAQUE) {
		val |= MXR_VID_CFG_BLEND_EN;
		val |= win_alpha;
	}
	mixer_reg_write(ctx, MXR_VIDEO_CFG, val);
}
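/*
 * Shadow-register synchronisation: on MXR_VER_16_0_33_0 and
 * MXR_VER_128_0_0_184 the hardware exposes a layer-update counter in
 * MXR_CFG, so the mixer counts as synced once that counter reads zero.
 * Older mixers are instead checked by comparing the programmed registers
 * against their *_S shadow copies (plus the VP shadow-update flag when
 * the video processor is present).
 */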
static bool mixer_is_synced(struct mixer_context *ctx)
{
	u32 base, shadow;

	if (ctx->mxr_ver == MXR_VER_16_0_33_0 ||
	    ctx->mxr_ver == MXR_VER_128_0_0_184)
		return !(mixer_reg_read(ctx, MXR_CFG) &
			 MXR_CFG_LAYER_UPDATE_COUNT_MASK);

	if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags) &&
	    vp_reg_read(ctx, VP_SHADOW_UPDATE))
		return false;

	base = mixer_reg_read(ctx, MXR_CFG);
	shadow = mixer_reg_read(ctx, MXR_CFG_S);
	if (base != shadow)
		return false;

	base = mixer_reg_read(ctx, MXR_GRAPHIC_BASE(0));
	shadow = mixer_reg_read(ctx, MXR_GRAPHIC_BASE_S(0));
	if (base != shadow)
		return false;

	base = mixer_reg_read(ctx, MXR_GRAPHIC_BASE(1));
	shadow = mixer_reg_read(ctx, MXR_GRAPHIC_BASE_S(1));
	if (base != shadow)
		return false;

	return true;
}

static int mixer_wait_for_sync(struct mixer_context *ctx)
{
	ktime_t timeout = ktime_add_us(ktime_get(), 100000);

	while (!mixer_is_synced(ctx)) {
		usleep_range(1000, 2000);
		if (ktime_compare(ktime_get(), timeout) > 0)
			return -ETIMEDOUT;
	}
	return 0;
}

static void mixer_disable_sync(struct mixer_context *ctx)
{
	mixer_reg_writemask(ctx, MXR_STATUS, 0, MXR_STATUS_SYNC_ENABLE);
}

static void mixer_enable_sync(struct mixer_context *ctx)
{
	if (ctx->mxr_ver == MXR_VER_16_0_33_0 ||
	    ctx->mxr_ver == MXR_VER_128_0_0_184)
		mixer_reg_writemask(ctx, MXR_CFG, ~0, MXR_CFG_LAYER_UPDATE);
	mixer_reg_writemask(ctx, MXR_STATUS, ~0, MXR_STATUS_SYNC_ENABLE);
	if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags))
		vp_reg_write(ctx, VP_SHADOW_UPDATE, VP_SHADOW_UPDATE_ENABLE);
}

static void mixer_cfg_scan(struct mixer_context *ctx, int width, int height)
{
	u32 val;

	/* choosing between interlace and progressive mode */
	val = test_bit(MXR_BIT_INTERLACE, &ctx->flags) ?
		MXR_CFG_SCAN_INTERLACE : MXR_CFG_SCAN_PROGRESSIVE;

	if (ctx->mxr_ver == MXR_VER_128_0_0_184)
		mixer_reg_write(ctx, MXR_RESOLUTION,
			MXR_MXR_RES_HEIGHT(height) | MXR_MXR_RES_WIDTH(width));
	else
		val |= ctx->scan_value;

	mixer_reg_writemask(ctx, MXR_CFG, val, MXR_CFG_SCAN_MASK);
}
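/*
 * Modes below 720 lines are flagged as BT.601, everything else as BT.709.
 * In the BT.709 case the colour-matrix registers are loaded with
 * coefficients that appear to be the BT.709 RGB-to-YCbCr matrix scaled
 * for limited-range output (e.g. 0.2126 * 219/255 ~= 0.184). The
 * quantization range bit follows drm_default_rgb_quant_range() for the
 * current mode.
 */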
static void mixer_cfg_rgb_fmt(struct mixer_context *ctx, struct drm_display_mode *mode)
{
	enum hdmi_quantization_range range = drm_default_rgb_quant_range(mode);
	u32 val;

	if (mode->vdisplay < 720) {
		val = MXR_CFG_RGB601;
	} else {
		val = MXR_CFG_RGB709;

		/* Configure the BT.709 CSC matrix for full range RGB. */
		mixer_reg_write(ctx, MXR_CM_COEFF_Y,
			MXR_CSC_CT( 0.184, 0.614, 0.063) |
			MXR_CM_COEFF_RGB_FULL);
		mixer_reg_write(ctx, MXR_CM_COEFF_CB,
			MXR_CSC_CT(-0.102, -0.338, 0.440));
		mixer_reg_write(ctx, MXR_CM_COEFF_CR,
			MXR_CSC_CT( 0.440, -0.399, -0.040));
	}

	if (range == HDMI_QUANTIZATION_RANGE_FULL)
		val |= MXR_CFG_QUANT_RANGE_FULL;
	else
		val |= MXR_CFG_QUANT_RANGE_LIMITED;

	mixer_reg_writemask(ctx, MXR_CFG, val, MXR_CFG_RGB_FMT_MASK);
}

static void mixer_cfg_layer(struct mixer_context *ctx, unsigned int win,
			    unsigned int priority, bool enable)
{
	u32 val = enable ? ~0 : 0;

	switch (win) {
	case 0:
		mixer_reg_writemask(ctx, MXR_CFG, val, MXR_CFG_GRP0_ENABLE);
		mixer_reg_writemask(ctx, MXR_LAYER_CFG,
				    MXR_LAYER_CFG_GRP0_VAL(priority),
				    MXR_LAYER_CFG_GRP0_MASK);
		break;
	case 1:
		mixer_reg_writemask(ctx, MXR_CFG, val, MXR_CFG_GRP1_ENABLE);
		mixer_reg_writemask(ctx, MXR_LAYER_CFG,
				    MXR_LAYER_CFG_GRP1_VAL(priority),
				    MXR_LAYER_CFG_GRP1_MASK);
		break;
	case VP_DEFAULT_WIN:
		if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags)) {
			vp_reg_writemask(ctx, VP_ENABLE, val, VP_ENABLE_ON);
			mixer_reg_writemask(ctx, MXR_CFG, val,
					    MXR_CFG_VP_ENABLE);
			mixer_reg_writemask(ctx, MXR_LAYER_CFG,
					    MXR_LAYER_CFG_VP_VAL(priority),
					    MXR_LAYER_CFG_VP_MASK);
		}
		break;
	}
}

static void mixer_run(struct mixer_context *ctx)
{
	mixer_reg_writemask(ctx, MXR_STATUS, ~0, MXR_STATUS_REG_RUN);
}

static void mixer_stop(struct mixer_context *ctx)
{
	int timeout = 20;

	mixer_reg_writemask(ctx, MXR_STATUS, 0, MXR_STATUS_REG_RUN);

	while (!(mixer_reg_read(ctx, MXR_STATUS) & MXR_STATUS_REG_IDLE) &&
	       --timeout)
		usleep_range(10000, 12000);
}

static void mixer_commit(struct mixer_context *ctx)
{
	struct drm_display_mode *mode = &ctx->crtc->base.state->adjusted_mode;

	mixer_cfg_scan(ctx, mode->hdisplay, mode->vdisplay);
	mixer_cfg_rgb_fmt(ctx, mode);
	mixer_run(ctx);
}
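/*
 * For interlaced scanout the VP is given separate top- and bottom-field
 * base addresses: the bottom field starts one line (fb pitch) below the
 * top field for linear buffers, or 0x40 bytes further for the Samsung
 * 64x32 tiled layout, and the vertical sizes/positions are programmed in
 * field units (halved). In progressive mode the bottom-field pointers are
 * simply left at zero.
 */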
static void vp_video_buffer(struct mixer_context *ctx,
			    struct exynos_drm_plane *plane)
{
	struct exynos_drm_plane_state *state =
				to_exynos_plane_state(plane->base.state);
	struct drm_framebuffer *fb = state->base.fb;
	unsigned int priority = state->base.normalized_zpos + 1;
	unsigned long flags;
	dma_addr_t luma_addr[2], chroma_addr[2];
	bool is_tiled, is_nv21;
	u32 val;

	is_nv21 = (fb->format->format == DRM_FORMAT_NV21);
	is_tiled = (fb->modifier == DRM_FORMAT_MOD_SAMSUNG_64_32_TILE);

	luma_addr[0] = exynos_drm_fb_dma_addr(fb, 0);
	chroma_addr[0] = exynos_drm_fb_dma_addr(fb, 1);

	if (test_bit(MXR_BIT_INTERLACE, &ctx->flags)) {
		if (is_tiled) {
			luma_addr[1] = luma_addr[0] + 0x40;
			chroma_addr[1] = chroma_addr[0] + 0x40;
		} else {
			luma_addr[1] = luma_addr[0] + fb->pitches[0];
			chroma_addr[1] = chroma_addr[0] + fb->pitches[1];
		}
	} else {
		luma_addr[1] = 0;
		chroma_addr[1] = 0;
	}

	spin_lock_irqsave(&ctx->reg_slock, flags);

	/* interlace or progressive scan mode */
	val = (test_bit(MXR_BIT_INTERLACE, &ctx->flags) ? ~0 : 0);
	vp_reg_writemask(ctx, VP_MODE, val, VP_MODE_LINE_SKIP);

	/* setup format */
	val = (is_nv21 ? VP_MODE_NV21 : VP_MODE_NV12);
	val |= (is_tiled ? VP_MODE_MEM_TILED : VP_MODE_MEM_LINEAR);
	vp_reg_writemask(ctx, VP_MODE, val, VP_MODE_FMT_MASK);

	/* setting size of input image */
	vp_reg_write(ctx, VP_IMG_SIZE_Y, VP_IMG_HSIZE(fb->pitches[0]) |
		VP_IMG_VSIZE(fb->height));
	/* chroma plane for NV12/NV21 is half the height of the luma plane */
	vp_reg_write(ctx, VP_IMG_SIZE_C, VP_IMG_HSIZE(fb->pitches[1]) |
		VP_IMG_VSIZE(fb->height / 2));

	vp_reg_write(ctx, VP_SRC_WIDTH, state->src.w);
	vp_reg_write(ctx, VP_SRC_H_POSITION,
		     VP_SRC_H_POSITION_VAL(state->src.x));
	vp_reg_write(ctx, VP_DST_WIDTH, state->crtc.w);
	vp_reg_write(ctx, VP_DST_H_POSITION, state->crtc.x);

	if (test_bit(MXR_BIT_INTERLACE, &ctx->flags)) {
		vp_reg_write(ctx, VP_SRC_HEIGHT, state->src.h / 2);
		vp_reg_write(ctx, VP_SRC_V_POSITION, state->src.y / 2);
		vp_reg_write(ctx, VP_DST_HEIGHT, state->crtc.h / 2);
		vp_reg_write(ctx, VP_DST_V_POSITION, state->crtc.y / 2);
	} else {
		vp_reg_write(ctx, VP_SRC_HEIGHT, state->src.h);
		vp_reg_write(ctx, VP_SRC_V_POSITION, state->src.y);
		vp_reg_write(ctx, VP_DST_HEIGHT, state->crtc.h);
		vp_reg_write(ctx, VP_DST_V_POSITION, state->crtc.y);
	}

	vp_reg_write(ctx, VP_H_RATIO, state->h_ratio);
	vp_reg_write(ctx, VP_V_RATIO, state->v_ratio);

	vp_reg_write(ctx, VP_ENDIAN_MODE, VP_ENDIAN_MODE_LITTLE);

	/* set buffer address to vp */
	vp_reg_write(ctx, VP_TOP_Y_PTR, luma_addr[0]);
	vp_reg_write(ctx, VP_BOT_Y_PTR, luma_addr[1]);
	vp_reg_write(ctx, VP_TOP_C_PTR, chroma_addr[0]);
	vp_reg_write(ctx, VP_BOT_C_PTR, chroma_addr[1]);

	mixer_cfg_layer(ctx, plane->index, priority, true);
	mixer_cfg_vp_blend(ctx, state->base.alpha);

	spin_unlock_irqrestore(&ctx->reg_slock, flags);

	mixer_regs_dump(ctx);
	vp_regs_dump(ctx);
}
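/*
 * The graphics-window source offset registers (MXR_GRAPHIC_SXY) are kept
 * at zero (see mixer_win_reset()); instead the DMA base address is
 * advanced by src.y * pitch + src.x * cpp so that the visible region
 * starts at offset zero, while MXR_GRAPHIC_SPAN carries the pitch in
 * pixels.
 */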
static void mixer_graph_buffer(struct mixer_context *ctx,
			       struct exynos_drm_plane *plane)
{
	struct exynos_drm_plane_state *state =
				to_exynos_plane_state(plane->base.state);
	struct drm_framebuffer *fb = state->base.fb;
	unsigned int priority = state->base.normalized_zpos + 1;
	unsigned long flags;
	unsigned int win = plane->index;
	unsigned int x_ratio = 0, y_ratio = 0;
	unsigned int dst_x_offset, dst_y_offset;
	unsigned int pixel_alpha;
	dma_addr_t dma_addr;
	unsigned int fmt;
	u32 val;

	if (fb->format->has_alpha)
		pixel_alpha = state->base.pixel_blend_mode;
	else
		pixel_alpha = DRM_MODE_BLEND_PIXEL_NONE;

	switch (fb->format->format) {
	case DRM_FORMAT_XRGB4444:
	case DRM_FORMAT_ARGB4444:
		fmt = MXR_FORMAT_ARGB4444;
		break;
	case DRM_FORMAT_XRGB1555:
	case DRM_FORMAT_ARGB1555:
		fmt = MXR_FORMAT_ARGB1555;
		break;
	case DRM_FORMAT_RGB565:
		fmt = MXR_FORMAT_RGB565;
		break;
	case DRM_FORMAT_XRGB8888:
	case DRM_FORMAT_ARGB8888:
	default:
		fmt = MXR_FORMAT_ARGB8888;
		break;
	}

	/* ratio is already checked by common plane code */
	x_ratio = state->h_ratio == (1 << 15);
	y_ratio = state->v_ratio == (1 << 15);

	dst_x_offset = state->crtc.x;
	dst_y_offset = state->crtc.y;

	/* translate dma address base s.t. the source image offset is zero */
	dma_addr = exynos_drm_fb_dma_addr(fb, 0)
		+ (state->src.x * fb->format->cpp[0])
		+ (state->src.y * fb->pitches[0]);

	spin_lock_irqsave(&ctx->reg_slock, flags);

	/* setup format */
	mixer_reg_writemask(ctx, MXR_GRAPHIC_CFG(win),
			    MXR_GRP_CFG_FORMAT_VAL(fmt), MXR_GRP_CFG_FORMAT_MASK);

	/* setup geometry */
	mixer_reg_write(ctx, MXR_GRAPHIC_SPAN(win),
			fb->pitches[0] / fb->format->cpp[0]);

	val = MXR_GRP_WH_WIDTH(state->src.w);
	val |= MXR_GRP_WH_HEIGHT(state->src.h);
	val |= MXR_GRP_WH_H_SCALE(x_ratio);
	val |= MXR_GRP_WH_V_SCALE(y_ratio);
	mixer_reg_write(ctx, MXR_GRAPHIC_WH(win), val);

	/* setup offsets in display image */
	val = MXR_GRP_DXY_DX(dst_x_offset);
	val |= MXR_GRP_DXY_DY(dst_y_offset);
	mixer_reg_write(ctx, MXR_GRAPHIC_DXY(win), val);

	/* set buffer address to mixer */
	mixer_reg_write(ctx, MXR_GRAPHIC_BASE(win), dma_addr);

	mixer_cfg_layer(ctx, win, priority, true);
	mixer_cfg_gfx_blend(ctx, win, pixel_alpha, state->base.alpha);

	spin_unlock_irqrestore(&ctx->reg_slock, flags);

	mixer_regs_dump(ctx);
}

static void vp_win_reset(struct mixer_context *ctx)
{
	unsigned int tries = 100;

	vp_reg_write(ctx, VP_SRESET, VP_SRESET_PROCESSING);
	while (--tries) {
		/* waiting until VP_SRESET_PROCESSING is 0 */
		if (~vp_reg_read(ctx, VP_SRESET) & VP_SRESET_PROCESSING)
			break;
		mdelay(10);
	}
	WARN(tries == 0, "failed to reset Video Processor\n");
}

static void mixer_win_reset(struct mixer_context *ctx)
{
	unsigned long flags;

	spin_lock_irqsave(&ctx->reg_slock, flags);

	mixer_reg_writemask(ctx, MXR_CFG, MXR_CFG_DST_HDMI, MXR_CFG_DST_MASK);

	/* set output in RGB888 mode */
	mixer_reg_writemask(ctx, MXR_CFG, MXR_CFG_OUT_RGB888, MXR_CFG_OUT_MASK);

	/* 16 beat burst in DMA */
	mixer_reg_writemask(ctx, MXR_STATUS, MXR_STATUS_16_BURST,
			    MXR_STATUS_BURST_MASK);

	/* reset default layer priority */
	mixer_reg_write(ctx, MXR_LAYER_CFG, 0);

	/* set all background colors to RGB (0,0,0) */
	mixer_reg_write(ctx, MXR_BG_COLOR0, MXR_YCBCR_VAL(0, 128, 128));
	mixer_reg_write(ctx, MXR_BG_COLOR1, MXR_YCBCR_VAL(0, 128, 128));
	mixer_reg_write(ctx, MXR_BG_COLOR2, MXR_YCBCR_VAL(0, 128, 128));

	if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags)) {
		/* configuration of Video Processor Registers */
		vp_win_reset(ctx);
		vp_default_filter(ctx);
	}

	/* disable all layers */
	mixer_reg_writemask(ctx, MXR_CFG, 0, MXR_CFG_GRP0_ENABLE);
	mixer_reg_writemask(ctx, MXR_CFG, 0, MXR_CFG_GRP1_ENABLE);
	if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags))
		mixer_reg_writemask(ctx, MXR_CFG, 0, MXR_CFG_VP_ENABLE);

	/* set all source image offsets to zero */
	mixer_reg_write(ctx, MXR_GRAPHIC_SXY(0), 0);
	mixer_reg_write(ctx, MXR_GRAPHIC_SXY(1), 0);

	spin_unlock_irqrestore(&ctx->reg_slock, flags);
}

static irqreturn_t mixer_irq_handler(int irq, void *arg)
{
	struct mixer_context *ctx = arg;
	u32 val;

	spin_lock(&ctx->reg_slock);

	/* read interrupt status for handling and clearing flags for VSYNC */
	val = mixer_reg_read(ctx, MXR_INT_STATUS);

	/* handling VSYNC */
	if (val & MXR_INT_STATUS_VSYNC) {
		/* vsync interrupt use different bit for read and clear */
		val |= MXR_INT_CLEAR_VSYNC;
		val &= ~MXR_INT_STATUS_VSYNC;

		/* interlace scan need to check shadow register */
		if (test_bit(MXR_BIT_INTERLACE, &ctx->flags)
		    && !mixer_is_synced(ctx))
			goto out;

		drm_crtc_handle_vblank(&ctx->crtc->base);
	}

out:
	/* clear interrupts */
	mixer_reg_write(ctx, MXR_INT_STATUS, val);

	spin_unlock(&ctx->reg_slock);

	return IRQ_HANDLED;
}

static int mixer_resources_init(struct mixer_context *mixer_ctx)
{
	struct device *dev = &mixer_ctx->pdev->dev;
	struct resource *res;
	int ret;

	spin_lock_init(&mixer_ctx->reg_slock);

	mixer_ctx->mixer = devm_clk_get(dev, "mixer");
	if (IS_ERR(mixer_ctx->mixer)) {
		dev_err(dev, "failed to get clock 'mixer'\n");
		return -ENODEV;
	}

	mixer_ctx->hdmi = devm_clk_get(dev, "hdmi");
	if (IS_ERR(mixer_ctx->hdmi)) {
		dev_err(dev, "failed to get clock 'hdmi'\n");
		return PTR_ERR(mixer_ctx->hdmi);
	}

	mixer_ctx->sclk_hdmi = devm_clk_get(dev, "sclk_hdmi");
	if (IS_ERR(mixer_ctx->sclk_hdmi)) {
		dev_err(dev, "failed to get clock 'sclk_hdmi'\n");
		return -ENODEV;
	}

	res = platform_get_resource(mixer_ctx->pdev, IORESOURCE_MEM, 0);
	if (res == NULL) {
		dev_err(dev, "get memory resource failed.\n");
		return -ENXIO;
	}

	mixer_ctx->mixer_regs = devm_ioremap(dev, res->start,
					     resource_size(res));
	if (mixer_ctx->mixer_regs == NULL) {
		dev_err(dev, "register mapping failed.\n");
		return -ENXIO;
	}

	ret = platform_get_irq(mixer_ctx->pdev, 0);
	if (ret < 0)
		return ret;
	mixer_ctx->irq = ret;

	ret = devm_request_irq(dev, mixer_ctx->irq, mixer_irq_handler,
			       0, "drm_mixer", mixer_ctx);
	if (ret) {
		dev_err(dev, "request interrupt failed.\n");
		return ret;
	}

	return 0;
}

static int vp_resources_init(struct mixer_context *mixer_ctx)
{
	struct device *dev = &mixer_ctx->pdev->dev;
	struct resource *res;

	mixer_ctx->vp = devm_clk_get(dev, "vp");
	if (IS_ERR(mixer_ctx->vp)) {
		dev_err(dev, "failed to get clock 'vp'\n");
		return -ENODEV;
	}

	if (test_bit(MXR_BIT_HAS_SCLK, &mixer_ctx->flags)) {
		mixer_ctx->sclk_mixer = devm_clk_get(dev, "sclk_mixer");
		if (IS_ERR(mixer_ctx->sclk_mixer)) {
			dev_err(dev, "failed to get clock 'sclk_mixer'\n");
			return -ENODEV;
		}
		mixer_ctx->mout_mixer = devm_clk_get(dev, "mout_mixer");
		if (IS_ERR(mixer_ctx->mout_mixer)) {
			dev_err(dev, "failed to get clock 'mout_mixer'\n");
			return -ENODEV;
		}

		if (mixer_ctx->sclk_hdmi && mixer_ctx->mout_mixer)
			clk_set_parent(mixer_ctx->mout_mixer,
				       mixer_ctx->sclk_hdmi);
	}

	res = platform_get_resource(mixer_ctx->pdev, IORESOURCE_MEM, 1);
	if (res == NULL) {
		dev_err(dev, "get memory resource failed.\n");
		return -ENXIO;
	}

	mixer_ctx->vp_regs = devm_ioremap(dev, res->start,
					  resource_size(res));
	if (mixer_ctx->vp_regs == NULL) {
		dev_err(dev, "register mapping failed.\n");
		return -ENXIO;
	}

	return 0;
}

static int mixer_initialize(struct mixer_context *mixer_ctx,
			    struct drm_device *drm_dev)
{
	int ret;

	mixer_ctx->drm_dev = drm_dev;

	/* acquire resources: regs, irqs, clocks */
	ret = mixer_resources_init(mixer_ctx);
	if (ret) {
		DRM_DEV_ERROR(mixer_ctx->dev,
			      "mixer_resources_init failed ret=%d\n", ret);
		return ret;
	}

	if (test_bit(MXR_BIT_VP_ENABLED, &mixer_ctx->flags)) {
		/* acquire vp resources: regs, irqs, clocks */
		ret = vp_resources_init(mixer_ctx);
		if (ret) {
			DRM_DEV_ERROR(mixer_ctx->dev,
				      "vp_resources_init failed ret=%d\n", ret);
			return ret;
		}
	}

	return exynos_drm_register_dma(drm_dev, mixer_ctx->dev,
				       &mixer_ctx->dma_priv);
}

static void mixer_ctx_remove(struct mixer_context *mixer_ctx)
{
	exynos_drm_unregister_dma(mixer_ctx->drm_dev, mixer_ctx->dev,
				  &mixer_ctx->dma_priv);
}

static int mixer_enable_vblank(struct exynos_drm_crtc *crtc)
{
	struct mixer_context *mixer_ctx = crtc->ctx;

	__set_bit(MXR_BIT_VSYNC, &mixer_ctx->flags);
	if (!test_bit(MXR_BIT_POWERED, &mixer_ctx->flags))
		return 0;

	/* enable vsync interrupt */
	mixer_reg_writemask(mixer_ctx, MXR_INT_STATUS, ~0, MXR_INT_CLEAR_VSYNC);
	mixer_reg_writemask(mixer_ctx, MXR_INT_EN, ~0, MXR_INT_EN_VSYNC);

	return 0;
}

static void mixer_disable_vblank(struct exynos_drm_crtc *crtc)
{
	struct mixer_context *mixer_ctx = crtc->ctx;

	__clear_bit(MXR_BIT_VSYNC, &mixer_ctx->flags);
	if (!test_bit(MXR_BIT_POWERED, &mixer_ctx->flags))
		return;

	/* disable vsync interrupt */
	mixer_reg_writemask(mixer_ctx, MXR_INT_STATUS, ~0, MXR_INT_CLEAR_VSYNC);
	mixer_reg_writemask(mixer_ctx, MXR_INT_EN, 0, MXR_INT_EN_VSYNC);
}

static void mixer_atomic_begin(struct exynos_drm_crtc *crtc)
{
	struct mixer_context *ctx = crtc->ctx;

	if (!test_bit(MXR_BIT_POWERED, &ctx->flags))
		return;

	if (mixer_wait_for_sync(ctx))
		dev_err(ctx->dev, "timeout waiting for VSYNC\n");
	mixer_disable_sync(ctx);
}

static void mixer_update_plane(struct exynos_drm_crtc *crtc,
			       struct exynos_drm_plane *plane)
{
	struct mixer_context *mixer_ctx = crtc->ctx;

	DRM_DEV_DEBUG_KMS(mixer_ctx->dev, "win: %d\n", plane->index);

	if (!test_bit(MXR_BIT_POWERED, &mixer_ctx->flags))
		return;

	if (plane->index == VP_DEFAULT_WIN)
		vp_video_buffer(mixer_ctx, plane);
	else
		mixer_graph_buffer(mixer_ctx, plane);
}

static void mixer_disable_plane(struct exynos_drm_crtc *crtc,
				struct exynos_drm_plane *plane)
{
	struct mixer_context *mixer_ctx = crtc->ctx;
	unsigned long flags;

	DRM_DEV_DEBUG_KMS(mixer_ctx->dev, "win: %d\n", plane->index);

	if (!test_bit(MXR_BIT_POWERED, &mixer_ctx->flags))
		return;

	spin_lock_irqsave(&mixer_ctx->reg_slock, flags);
	mixer_cfg_layer(mixer_ctx, plane->index, 0, false);
	spin_unlock_irqrestore(&mixer_ctx->reg_slock, flags);
}

static void mixer_atomic_flush(struct exynos_drm_crtc *crtc)
{
	struct mixer_context *mixer_ctx = crtc->ctx;

	if (!test_bit(MXR_BIT_POWERED, &mixer_ctx->flags))
		return;

	mixer_enable_sync(mixer_ctx);
	exynos_crtc_handle_event(crtc);
}

static void mixer_atomic_enable(struct exynos_drm_crtc *crtc)
{
	struct mixer_context *ctx = crtc->ctx;
	int ret;

	if (test_bit(MXR_BIT_POWERED, &ctx->flags))
		return;

	ret = pm_runtime_resume_and_get(ctx->dev);
	if (ret < 0) {
		dev_err(ctx->dev, "failed to enable MIXER device.\n");
		return;
	}

	exynos_drm_pipe_clk_enable(crtc, true);

	mixer_disable_sync(ctx);

	mixer_reg_writemask(ctx, MXR_STATUS, ~0, MXR_STATUS_SOFT_RESET);

	if (test_bit(MXR_BIT_VSYNC, &ctx->flags)) {
		mixer_reg_writemask(ctx, MXR_INT_STATUS, ~0,
				    MXR_INT_CLEAR_VSYNC);
		mixer_reg_writemask(ctx, MXR_INT_EN, ~0, MXR_INT_EN_VSYNC);
	}
	mixer_win_reset(ctx);

	mixer_commit(ctx);

	mixer_enable_sync(ctx);

	set_bit(MXR_BIT_POWERED, &ctx->flags);
}

static void mixer_atomic_disable(struct exynos_drm_crtc *crtc)
{
	struct mixer_context *ctx = crtc->ctx;
	int i;

	if (!test_bit(MXR_BIT_POWERED, &ctx->flags))
		return;

	mixer_stop(ctx);
	mixer_regs_dump(ctx);

	for (i = 0; i < MIXER_WIN_NR; i++)
		mixer_disable_plane(crtc, &ctx->planes[i]);

	exynos_drm_pipe_clk_enable(crtc, false);

	pm_runtime_put(ctx->dev);

	clear_bit(MXR_BIT_POWERED, &ctx->flags);
}

static enum drm_mode_status mixer_mode_valid(struct exynos_drm_crtc *crtc,
					     const struct drm_display_mode *mode)
{
	struct mixer_context *ctx = crtc->ctx;
	u32 w = mode->hdisplay, h = mode->vdisplay;

	DRM_DEV_DEBUG_KMS(ctx->dev, "xres=%d, yres=%d, refresh=%d, intl=%d\n",
			  w, h, drm_mode_vrefresh(mode),
			  !!(mode->flags & DRM_MODE_FLAG_INTERLACE));

	if (ctx->mxr_ver == MXR_VER_128_0_0_184)
		return MODE_OK;

	if ((w >= 464 && w <= 720 && h >= 261 && h <= 576) ||
	    (w >= 1024 && w <= 1280 && h >= 576 && h <= 720) ||
	    (w >= 1664 && w <= 1920 && h >= 936 && h <= 1080))
		return MODE_OK;

	if ((w == 1024 && h == 768) ||
	    (w == 1366 && h == 768) ||
	    (w == 1280 && h == 1024))
		return MODE_OK;

	return MODE_BAD;
}
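/*
 * Pre-128.0.0.184 mixers only support a fixed set of scan timings, so
 * mode_fixup() picks the smallest standard timing (720x480, 720x576,
 * 1280x720 or 1920x1080) that fits the requested mode, remembers the
 * matching MXR_CFG_SCAN_* value and, when the requested mode is smaller,
 * pads the adjusted mode out to that standard timing. It also caches the
 * interlace flag from the requested mode.
 */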
static bool mixer_mode_fixup(struct exynos_drm_crtc *crtc,
			     const struct drm_display_mode *mode,
			     struct drm_display_mode *adjusted_mode)
{
	struct mixer_context *ctx = crtc->ctx;
	int width = mode->hdisplay, height = mode->vdisplay, i;

	static const struct {
		int hdisplay, vdisplay, htotal, vtotal, scan_val;
	} modes[] = {
		{ 720, 480, 858, 525, MXR_CFG_SCAN_NTSC | MXR_CFG_SCAN_SD },
		{ 720, 576, 864, 625, MXR_CFG_SCAN_PAL | MXR_CFG_SCAN_SD },
		{ 1280, 720, 1650, 750, MXR_CFG_SCAN_HD_720 | MXR_CFG_SCAN_HD },
		{ 1920, 1080, 2200, 1125, MXR_CFG_SCAN_HD_1080 |
			MXR_CFG_SCAN_HD }
	};

	if (mode->flags & DRM_MODE_FLAG_INTERLACE)
		__set_bit(MXR_BIT_INTERLACE, &ctx->flags);
	else
		__clear_bit(MXR_BIT_INTERLACE, &ctx->flags);

	if (ctx->mxr_ver == MXR_VER_128_0_0_184)
		return true;

	for (i = 0; i < ARRAY_SIZE(modes); ++i)
		if (width <= modes[i].hdisplay && height <= modes[i].vdisplay) {
			ctx->scan_value = modes[i].scan_val;
			if (width < modes[i].hdisplay ||
			    height < modes[i].vdisplay) {
				adjusted_mode->hdisplay = modes[i].hdisplay;
				adjusted_mode->hsync_start = modes[i].hdisplay;
				adjusted_mode->hsync_end = modes[i].htotal;
				adjusted_mode->htotal = modes[i].htotal;
				adjusted_mode->vdisplay = modes[i].vdisplay;
				adjusted_mode->vsync_start = modes[i].vdisplay;
				adjusted_mode->vsync_end = modes[i].vtotal;
				adjusted_mode->vtotal = modes[i].vtotal;
			}

			return true;
		}

	return false;
}

static const struct exynos_drm_crtc_ops mixer_crtc_ops = {
	.atomic_enable = mixer_atomic_enable,
	.atomic_disable = mixer_atomic_disable,
	.enable_vblank = mixer_enable_vblank,
	.disable_vblank = mixer_disable_vblank,
	.atomic_begin = mixer_atomic_begin,
	.update_plane = mixer_update_plane,
	.disable_plane = mixer_disable_plane,
	.atomic_flush = mixer_atomic_flush,
	.mode_valid = mixer_mode_valid,
	.mode_fixup = mixer_mode_fixup,
};

static const struct mixer_drv_data exynos5420_mxr_drv_data = {
	.version = MXR_VER_128_0_0_184,
	.is_vp_enabled = 0,
};

static const struct mixer_drv_data exynos5250_mxr_drv_data = {
	.version = MXR_VER_16_0_33_0,
	.is_vp_enabled = 0,
};

static const struct mixer_drv_data exynos4212_mxr_drv_data = {
	.version = MXR_VER_0_0_0_16,
	.is_vp_enabled = 1,
};

static const struct mixer_drv_data exynos4210_mxr_drv_data = {
	.version = MXR_VER_0_0_0_16,
	.is_vp_enabled = 1,
	.has_sclk = 1,
};

static const struct of_device_id mixer_match_types[] = {
	{
		.compatible = "samsung,exynos4210-mixer",
		.data = &exynos4210_mxr_drv_data,
	}, {
		.compatible = "samsung,exynos4212-mixer",
		.data = &exynos4212_mxr_drv_data,
	}, {
		.compatible = "samsung,exynos5-mixer",
		.data = &exynos5250_mxr_drv_data,
	}, {
		.compatible = "samsung,exynos5250-mixer",
		.data = &exynos5250_mxr_drv_data,
	}, {
		.compatible = "samsung,exynos5420-mixer",
		.data = &exynos5420_mxr_drv_data,
	}, {
		/* end node */
	}
};
MODULE_DEVICE_TABLE(of, mixer_match_types);

static int mixer_bind(struct device *dev, struct device *manager, void *data)
{
	struct mixer_context *ctx = dev_get_drvdata(dev);
	struct drm_device *drm_dev = data;
	struct exynos_drm_plane *exynos_plane;
	unsigned int i;
	int ret;

	ret = mixer_initialize(ctx, drm_dev);
	if (ret)
		return ret;

	for (i = 0; i < MIXER_WIN_NR; i++) {
		if (i == VP_DEFAULT_WIN && !test_bit(MXR_BIT_VP_ENABLED,
						     &ctx->flags))
			continue;

		ret = exynos_plane_init(drm_dev, &ctx->planes[i], i,
					&plane_configs[i]);
		if (ret)
			return ret;
	}

	exynos_plane = &ctx->planes[DEFAULT_WIN];
	ctx->crtc = exynos_drm_crtc_create(drm_dev, &exynos_plane->base,
					   EXYNOS_DISPLAY_TYPE_HDMI, &mixer_crtc_ops, ctx);
	if (IS_ERR(ctx->crtc)) {
		mixer_ctx_remove(ctx);
		ret = PTR_ERR(ctx->crtc);
		goto free_ctx;
	}

	return 0;

free_ctx:
	devm_kfree(dev, ctx);
	return ret;
}

static void mixer_unbind(struct device *dev, struct device *master, void *data)
{
	struct mixer_context *ctx = dev_get_drvdata(dev);

	mixer_ctx_remove(ctx);
}

static const struct component_ops mixer_component_ops = {
	.bind = mixer_bind,
	.unbind = mixer_unbind,
};

static int mixer_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	const struct mixer_drv_data *drv;
	struct mixer_context *ctx;
	int ret;

	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
	if (!ctx) {
		DRM_DEV_ERROR(dev, "failed to alloc mixer context.\n");
		return -ENOMEM;
	}

	drv = of_device_get_match_data(dev);

	ctx->pdev = pdev;
	ctx->dev = dev;
	ctx->mxr_ver = drv->version;

	if (drv->is_vp_enabled)
		__set_bit(MXR_BIT_VP_ENABLED, &ctx->flags);
	if (drv->has_sclk)
		__set_bit(MXR_BIT_HAS_SCLK, &ctx->flags);

	platform_set_drvdata(pdev, ctx);

	pm_runtime_enable(dev);

	ret = component_add(&pdev->dev, &mixer_component_ops);

	if (ret)
		pm_runtime_disable(dev);

	return ret;
}

static int mixer_remove(struct platform_device *pdev)
{
	pm_runtime_disable(&pdev->dev);

	component_del(&pdev->dev, &mixer_component_ops);

	return 0;
}

static int __maybe_unused exynos_mixer_suspend(struct device *dev)
{
	struct mixer_context *ctx = dev_get_drvdata(dev);

	clk_disable_unprepare(ctx->hdmi);
	clk_disable_unprepare(ctx->mixer);
	if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags)) {
		clk_disable_unprepare(ctx->vp);
		if (test_bit(MXR_BIT_HAS_SCLK, &ctx->flags))
			clk_disable_unprepare(ctx->sclk_mixer);
	}

	return 0;
}

static int __maybe_unused exynos_mixer_resume(struct device *dev)
{
	struct mixer_context *ctx = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(ctx->mixer);
	if (ret < 0) {
		DRM_DEV_ERROR(ctx->dev,
			      "Failed to prepare_enable the mixer clk [%d]\n",
			      ret);
		return ret;
	}
	ret = clk_prepare_enable(ctx->hdmi);
	if (ret < 0) {
		DRM_DEV_ERROR(dev,
			      "Failed to prepare_enable the hdmi clk [%d]\n",
			      ret);
		return ret;
	}
	if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags)) {
		ret = clk_prepare_enable(ctx->vp);
		if (ret < 0) {
			DRM_DEV_ERROR(dev,
				      "Failed to prepare_enable the vp clk [%d]\n",
				      ret);
			return ret;
		}
		if (test_bit(MXR_BIT_HAS_SCLK, &ctx->flags)) {
			ret = clk_prepare_enable(ctx->sclk_mixer);
			if (ret < 0) {
				DRM_DEV_ERROR(dev,
					      "Failed to prepare_enable the " \
					      "sclk_mixer clk [%d]\n",
					      ret);
				return ret;
			}
		}
	}

	return 0;
}

static const struct dev_pm_ops exynos_mixer_pm_ops = {
	SET_RUNTIME_PM_OPS(exynos_mixer_suspend, exynos_mixer_resume, NULL)
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
};

struct platform_driver mixer_driver = {
	.driver = {
		.name = "exynos-mixer",
		.owner = THIS_MODULE,
		.pm = &exynos_mixer_pm_ops,
		.of_match_table = mixer_match_types,
	},
	.probe = mixer_probe,
	.remove = mixer_remove,
};