/*
 * Copyright 2018 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
#include "wndw.h"
#include "wimm.h"
#include "handles.h"

#include <nvif/class.h>
#include <nvif/cl0002.h>

#include <nvhw/class/cl507c.h>
#include <nvhw/class/cl507e.h>
#include <nvhw/class/clc37e.h>

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_blend.h>
#include <drm/drm_gem_atomic_helper.h>
#include <drm/drm_fourcc.h>

#include "nouveau_bo.h"
#include "nouveau_gem.h"
  37. static void
  38. nv50_wndw_ctxdma_del(struct nv50_wndw_ctxdma *ctxdma)
  39. {
  40. nvif_object_dtor(&ctxdma->object);
  41. list_del(&ctxdma->head);
  42. kfree(ctxdma);
  43. }
  44. static struct nv50_wndw_ctxdma *
  45. nv50_wndw_ctxdma_new(struct nv50_wndw *wndw, struct drm_framebuffer *fb)
  46. {
  47. struct nouveau_drm *drm = nouveau_drm(fb->dev);
  48. struct nv50_wndw_ctxdma *ctxdma;
  49. u32 handle;
  50. u32 unused;
  51. u8 kind;
  52. struct {
  53. struct nv_dma_v0 base;
  54. union {
  55. struct nv50_dma_v0 nv50;
  56. struct gf100_dma_v0 gf100;
  57. struct gf119_dma_v0 gf119;
  58. };
  59. } args = {};
  60. u32 argc = sizeof(args.base);
  61. int ret;
  62. nouveau_framebuffer_get_layout(fb, &unused, &kind);
  63. handle = NV50_DISP_HANDLE_WNDW_CTX(kind);
  64. list_for_each_entry(ctxdma, &wndw->ctxdma.list, head) {
  65. if (ctxdma->object.handle == handle)
  66. return ctxdma;
  67. }
  68. if (!(ctxdma = kzalloc(sizeof(*ctxdma), GFP_KERNEL)))
  69. return ERR_PTR(-ENOMEM);
  70. list_add(&ctxdma->head, &wndw->ctxdma.list);
  71. args.base.target = NV_DMA_V0_TARGET_VRAM;
  72. args.base.access = NV_DMA_V0_ACCESS_RDWR;
  73. args.base.start = 0;
  74. args.base.limit = drm->client.device.info.ram_user - 1;
  75. if (drm->client.device.info.chipset < 0x80) {
  76. args.nv50.part = NV50_DMA_V0_PART_256;
  77. argc += sizeof(args.nv50);
  78. } else
  79. if (drm->client.device.info.chipset < 0xc0) {
  80. args.nv50.part = NV50_DMA_V0_PART_256;
  81. args.nv50.kind = kind;
  82. argc += sizeof(args.nv50);
  83. } else
  84. if (drm->client.device.info.chipset < 0xd0) {
  85. args.gf100.kind = kind;
  86. argc += sizeof(args.gf100);
  87. } else {
  88. args.gf119.page = GF119_DMA_V0_PAGE_LP;
  89. args.gf119.kind = kind;
  90. argc += sizeof(args.gf119);
  91. }
  92. ret = nvif_object_ctor(wndw->ctxdma.parent, "kmsFbCtxDma", handle,
  93. NV_DMA_IN_MEMORY, &args, argc, &ctxdma->object);
  94. if (ret) {
  95. nv50_wndw_ctxdma_del(ctxdma);
  96. return ERR_PTR(ret);
  97. }
  98. return ctxdma;
  99. }
  100. int
  101. nv50_wndw_wait_armed(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
  102. {
  103. struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
  104. if (asyw->set.ntfy) {
  105. return wndw->func->ntfy_wait_begun(disp->sync,
  106. asyw->ntfy.offset,
  107. wndw->wndw.base.device);
  108. }
  109. return 0;
  110. }
  111. void
  112. nv50_wndw_flush_clr(struct nv50_wndw *wndw, u32 *interlock, bool flush,
  113. struct nv50_wndw_atom *asyw)
  114. {
  115. union nv50_wndw_atom_mask clr = {
  116. .mask = asyw->clr.mask & ~(flush ? 0 : asyw->set.mask),
  117. };
  118. if (clr.sema ) wndw->func-> sema_clr(wndw);
  119. if (clr.ntfy ) wndw->func-> ntfy_clr(wndw);
  120. if (clr.xlut ) wndw->func-> xlut_clr(wndw);
  121. if (clr.csc ) wndw->func-> csc_clr(wndw);
  122. if (clr.image) wndw->func->image_clr(wndw);
  123. interlock[wndw->interlock.type] |= wndw->interlock.data;
  124. }
  125. void
  126. nv50_wndw_flush_set(struct nv50_wndw *wndw, u32 *interlock,
  127. struct nv50_wndw_atom *asyw)
  128. {
  129. if (interlock[NV50_DISP_INTERLOCK_CORE]) {
  130. asyw->image.mode = NV507C_SET_PRESENT_CONTROL_BEGIN_MODE_NON_TEARING;
  131. asyw->image.interval = 1;
  132. }
  133. if (asyw->set.sema ) wndw->func->sema_set (wndw, asyw);
  134. if (asyw->set.ntfy ) wndw->func->ntfy_set (wndw, asyw);
  135. if (asyw->set.image) wndw->func->image_set(wndw, asyw);
  136. if (asyw->set.xlut ) {
  137. if (asyw->ilut) {
  138. asyw->xlut.i.offset =
  139. nv50_lut_load(&wndw->ilut, asyw->xlut.i.buffer,
  140. asyw->ilut, asyw->xlut.i.load);
  141. }
  142. wndw->func->xlut_set(wndw, asyw);
  143. }
  144. if (asyw->set.csc ) wndw->func->csc_set (wndw, asyw);
  145. if (asyw->set.scale) wndw->func->scale_set(wndw, asyw);
  146. if (asyw->set.blend) wndw->func->blend_set(wndw, asyw);
  147. if (asyw->set.point) {
  148. if (asyw->set.point = false, asyw->set.mask)
  149. interlock[wndw->interlock.type] |= wndw->interlock.data;
  150. interlock[NV50_DISP_INTERLOCK_WIMM] |= wndw->interlock.wimm;
  151. wndw->immd->point(wndw, asyw);
  152. wndw->immd->update(wndw, interlock);
  153. } else {
  154. interlock[wndw->interlock.type] |= wndw->interlock.data;
  155. }
  156. }
  157. void
  158. nv50_wndw_ntfy_enable(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
  159. {
  160. struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
  161. asyw->ntfy.handle = wndw->wndw.sync.handle;
  162. asyw->ntfy.offset = wndw->ntfy;
  163. asyw->ntfy.awaken = false;
  164. asyw->set.ntfy = true;
  165. wndw->func->ntfy_reset(disp->sync, wndw->ntfy);
  166. wndw->ntfy ^= 0x10;
  167. }
  168. static void
  169. nv50_wndw_atomic_check_release(struct nv50_wndw *wndw,
  170. struct nv50_wndw_atom *asyw,
  171. struct nv50_head_atom *asyh)
  172. {
  173. struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
  174. NV_ATOMIC(drm, "%s release\n", wndw->plane.name);
  175. wndw->func->release(wndw, asyw, asyh);
  176. asyw->ntfy.handle = 0;
  177. asyw->sema.handle = 0;
  178. asyw->xlut.handle = 0;
  179. memset(asyw->image.handle, 0x00, sizeof(asyw->image.handle));
  180. }
  181. static int
  182. nv50_wndw_atomic_check_acquire_yuv(struct nv50_wndw_atom *asyw)
  183. {
  184. switch (asyw->state.fb->format->format) {
  185. case DRM_FORMAT_YUYV:
  186. asyw->image.format = NV507E_SURFACE_SET_PARAMS_FORMAT_VE8YO8UE8YE8;
  187. break;
  188. case DRM_FORMAT_UYVY:
  189. asyw->image.format = NV507E_SURFACE_SET_PARAMS_FORMAT_YO8VE8YE8UE8;
  190. break;
  191. default:
  192. WARN_ON(1);
  193. return -EINVAL;
  194. }
  195. asyw->image.colorspace = NV507E_SURFACE_SET_PARAMS_COLOR_SPACE_YUV_601;
  196. return 0;
  197. }
  198. static int
  199. nv50_wndw_atomic_check_acquire_rgb(struct nv50_wndw_atom *asyw)
  200. {
  201. switch (asyw->state.fb->format->format) {
  202. case DRM_FORMAT_C8:
  203. asyw->image.format = NV507C_SURFACE_SET_PARAMS_FORMAT_I8;
  204. break;
  205. case DRM_FORMAT_XRGB8888:
  206. case DRM_FORMAT_ARGB8888:
  207. asyw->image.format = NV507C_SURFACE_SET_PARAMS_FORMAT_A8R8G8B8;
  208. break;
  209. case DRM_FORMAT_RGB565:
  210. asyw->image.format = NV507C_SURFACE_SET_PARAMS_FORMAT_R5G6B5;
  211. break;
  212. case DRM_FORMAT_XRGB1555:
  213. case DRM_FORMAT_ARGB1555:
  214. asyw->image.format = NV507C_SURFACE_SET_PARAMS_FORMAT_A1R5G5B5;
  215. break;
  216. case DRM_FORMAT_XBGR2101010:
  217. case DRM_FORMAT_ABGR2101010:
  218. asyw->image.format = NV507C_SURFACE_SET_PARAMS_FORMAT_A2B10G10R10;
  219. break;
  220. case DRM_FORMAT_XBGR8888:
  221. case DRM_FORMAT_ABGR8888:
  222. asyw->image.format = NV507C_SURFACE_SET_PARAMS_FORMAT_A8B8G8R8;
  223. break;
  224. case DRM_FORMAT_XRGB2101010:
  225. case DRM_FORMAT_ARGB2101010:
  226. asyw->image.format = NVC37E_SET_PARAMS_FORMAT_A2R10G10B10;
  227. break;
  228. case DRM_FORMAT_XBGR16161616F:
  229. case DRM_FORMAT_ABGR16161616F:
  230. asyw->image.format = NV507C_SURFACE_SET_PARAMS_FORMAT_RF16_GF16_BF16_AF16;
  231. break;
  232. default:
  233. return -EINVAL;
  234. }
  235. asyw->image.colorspace = NV507E_SURFACE_SET_PARAMS_COLOR_SPACE_RGB;
  236. return 0;
  237. }
  238. static int
  239. nv50_wndw_atomic_check_acquire(struct nv50_wndw *wndw, bool modeset,
  240. struct nv50_wndw_atom *armw,
  241. struct nv50_wndw_atom *asyw,
  242. struct nv50_head_atom *asyh)
  243. {
  244. struct drm_framebuffer *fb = asyw->state.fb;
  245. struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
  246. uint8_t kind;
  247. uint32_t tile_mode;
  248. int ret;
  249. NV_ATOMIC(drm, "%s acquire\n", wndw->plane.name);
  250. if (fb != armw->state.fb || !armw->visible || modeset) {
  251. nouveau_framebuffer_get_layout(fb, &tile_mode, &kind);
  252. asyw->image.w = fb->width;
  253. asyw->image.h = fb->height;
  254. asyw->image.kind = kind;
  255. ret = nv50_wndw_atomic_check_acquire_rgb(asyw);
  256. if (ret) {
  257. ret = nv50_wndw_atomic_check_acquire_yuv(asyw);
  258. if (ret)
  259. return ret;
  260. }
  261. if (asyw->image.kind) {
  262. asyw->image.layout = NV507C_SURFACE_SET_STORAGE_MEMORY_LAYOUT_BLOCKLINEAR;
  263. if (drm->client.device.info.chipset >= 0xc0)
  264. asyw->image.blockh = tile_mode >> 4;
  265. else
  266. asyw->image.blockh = tile_mode;
  267. asyw->image.blocks[0] = fb->pitches[0] / 64;
  268. asyw->image.pitch[0] = 0;
  269. } else {
  270. asyw->image.layout = NV507C_SURFACE_SET_STORAGE_MEMORY_LAYOUT_PITCH;
  271. asyw->image.blockh = NV507C_SURFACE_SET_STORAGE_BLOCK_HEIGHT_ONE_GOB;
  272. asyw->image.blocks[0] = 0;
  273. asyw->image.pitch[0] = fb->pitches[0];
  274. }
  275. if (!asyh->state.async_flip)
  276. asyw->image.interval = 1;
  277. else
  278. asyw->image.interval = 0;
  279. if (asyw->image.interval)
  280. asyw->image.mode = NV507C_SET_PRESENT_CONTROL_BEGIN_MODE_NON_TEARING;
  281. else
  282. asyw->image.mode = NV507C_SET_PRESENT_CONTROL_BEGIN_MODE_IMMEDIATE;
  283. asyw->set.image = wndw->func->image_set != NULL;
  284. }
  285. if (wndw->func->scale_set) {
  286. asyw->scale.sx = asyw->state.src_x >> 16;
  287. asyw->scale.sy = asyw->state.src_y >> 16;
  288. asyw->scale.sw = asyw->state.src_w >> 16;
  289. asyw->scale.sh = asyw->state.src_h >> 16;
  290. asyw->scale.dw = asyw->state.crtc_w;
  291. asyw->scale.dh = asyw->state.crtc_h;
  292. if (memcmp(&armw->scale, &asyw->scale, sizeof(asyw->scale)))
  293. asyw->set.scale = true;
  294. }
  295. if (wndw->func->blend_set) {
  296. asyw->blend.depth = 255 - asyw->state.normalized_zpos;
  297. asyw->blend.k1 = asyw->state.alpha >> 8;
  298. switch (asyw->state.pixel_blend_mode) {
  299. case DRM_MODE_BLEND_PREMULTI:
  300. asyw->blend.src_color = NVC37E_SET_COMPOSITION_FACTOR_SELECT_SRC_COLOR_FACTOR_MATCH_SELECT_K1;
  301. asyw->blend.dst_color = NVC37E_SET_COMPOSITION_FACTOR_SELECT_DST_COLOR_FACTOR_MATCH_SELECT_NEG_K1_TIMES_SRC;
  302. break;
  303. case DRM_MODE_BLEND_COVERAGE:
  304. asyw->blend.src_color = NVC37E_SET_COMPOSITION_FACTOR_SELECT_SRC_COLOR_FACTOR_MATCH_SELECT_K1_TIMES_SRC;
  305. asyw->blend.dst_color = NVC37E_SET_COMPOSITION_FACTOR_SELECT_DST_COLOR_FACTOR_MATCH_SELECT_NEG_K1_TIMES_SRC;
  306. break;
  307. case DRM_MODE_BLEND_PIXEL_NONE:
  308. default:
  309. asyw->blend.src_color = NVC37E_SET_COMPOSITION_FACTOR_SELECT_SRC_COLOR_FACTOR_MATCH_SELECT_K1;
  310. asyw->blend.dst_color = NVC37E_SET_COMPOSITION_FACTOR_SELECT_DST_COLOR_FACTOR_MATCH_SELECT_NEG_K1;
  311. break;
  312. }
  313. if (memcmp(&armw->blend, &asyw->blend, sizeof(asyw->blend)))
  314. asyw->set.blend = true;
  315. }
  316. if (wndw->immd) {
  317. asyw->point.x = asyw->state.crtc_x;
  318. asyw->point.y = asyw->state.crtc_y;
  319. if (memcmp(&armw->point, &asyw->point, sizeof(asyw->point)))
  320. asyw->set.point = true;
  321. }
  322. return wndw->func->acquire(wndw, asyw, asyh);
  323. }
  324. static int
  325. nv50_wndw_atomic_check_lut(struct nv50_wndw *wndw,
  326. struct nv50_wndw_atom *armw,
  327. struct nv50_wndw_atom *asyw,
  328. struct nv50_head_atom *asyh)
  329. {
  330. struct drm_property_blob *ilut = asyh->state.degamma_lut;
  331. /* I8 format without an input LUT makes no sense, and the
  332. * HW error-checks for this.
  333. *
  334. * In order to handle legacy gamma, when there's no input
  335. * LUT we need to steal the output LUT and use it instead.
  336. */
  337. if (!ilut && asyw->state.fb->format->format == DRM_FORMAT_C8) {
  338. /* This should be an error, but there's legacy clients
  339. * that do a modeset before providing a gamma table.
  340. *
  341. * We keep the window disabled to avoid angering HW.
  342. */
  343. if (!(ilut = asyh->state.gamma_lut)) {
  344. asyw->visible = false;
  345. return 0;
  346. }
  347. if (wndw->func->ilut)
  348. asyh->wndw.olut |= BIT(wndw->id);
  349. } else {
  350. asyh->wndw.olut &= ~BIT(wndw->id);
  351. }
  352. if (!ilut && wndw->func->ilut_identity &&
  353. asyw->state.fb->format->format != DRM_FORMAT_XBGR16161616F &&
  354. asyw->state.fb->format->format != DRM_FORMAT_ABGR16161616F) {
  355. static struct drm_property_blob dummy = {};
  356. ilut = &dummy;
  357. }
  358. /* Recalculate LUT state. */
  359. memset(&asyw->xlut, 0x00, sizeof(asyw->xlut));
  360. if ((asyw->ilut = wndw->func->ilut ? ilut : NULL)) {
  361. wndw->func->ilut(wndw, asyw, drm_color_lut_size(ilut));
  362. asyw->xlut.handle = wndw->wndw.vram.handle;
  363. asyw->xlut.i.buffer = !asyw->xlut.i.buffer;
  364. asyw->set.xlut = true;
  365. } else {
  366. asyw->clr.xlut = armw->xlut.handle != 0;
  367. }
  368. /* Handle setting base SET_OUTPUT_LUT_LO_ENABLE_USE_CORE_LUT. */
  369. if (wndw->func->olut_core &&
  370. (!armw->visible || (armw->xlut.handle && !asyw->xlut.handle)))
  371. asyw->set.xlut = true;
  372. if (wndw->func->csc && asyh->state.ctm) {
  373. const struct drm_color_ctm *ctm = asyh->state.ctm->data;
  374. wndw->func->csc(wndw, asyw, ctm);
  375. asyw->csc.valid = true;
  376. asyw->set.csc = true;
  377. } else {
  378. asyw->csc.valid = false;
  379. asyw->clr.csc = armw->csc.valid;
  380. }
  381. /* Can't do an immediate flip while changing the LUT. */
  382. asyh->state.async_flip = false;
  383. return 0;
  384. }
  385. static int
  386. nv50_wndw_atomic_check(struct drm_plane *plane,
  387. struct drm_atomic_state *state)
  388. {
  389. struct drm_plane_state *new_plane_state = drm_atomic_get_new_plane_state(state,
  390. plane);
  391. struct nouveau_drm *drm = nouveau_drm(plane->dev);
  392. struct nv50_wndw *wndw = nv50_wndw(plane);
  393. struct nv50_wndw_atom *armw = nv50_wndw_atom(wndw->plane.state);
  394. struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
  395. struct nv50_head_atom *harm = NULL, *asyh = NULL;
  396. bool modeset = false;
  397. int ret;
  398. NV_ATOMIC(drm, "%s atomic_check\n", plane->name);
  399. /* Fetch the assembly state for the head the window will belong to,
  400. * and determine whether the window will be visible.
  401. */
  402. if (asyw->state.crtc) {
  403. asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
  404. if (IS_ERR(asyh))
  405. return PTR_ERR(asyh);
  406. modeset = drm_atomic_crtc_needs_modeset(&asyh->state);
  407. asyw->visible = asyh->state.active;
  408. } else {
  409. asyw->visible = false;
  410. }
  411. /* Fetch assembly state for the head the window used to belong to. */
  412. if (armw->state.crtc) {
  413. harm = nv50_head_atom_get(asyw->state.state, armw->state.crtc);
  414. if (IS_ERR(harm))
  415. return PTR_ERR(harm);
  416. }
  417. /* LUT configuration can potentially cause the window to be disabled. */
  418. if (asyw->visible && wndw->func->xlut_set &&
  419. (!armw->visible ||
  420. asyh->state.color_mgmt_changed ||
  421. asyw->state.fb->format->format !=
  422. armw->state.fb->format->format)) {
  423. ret = nv50_wndw_atomic_check_lut(wndw, armw, asyw, asyh);
  424. if (ret)
  425. return ret;
  426. }
  427. /* Calculate new window state. */
  428. if (asyw->visible) {
  429. ret = nv50_wndw_atomic_check_acquire(wndw, modeset,
  430. armw, asyw, asyh);
  431. if (ret)
  432. return ret;
  433. asyh->wndw.mask |= BIT(wndw->id);
  434. } else
  435. if (armw->visible) {
  436. nv50_wndw_atomic_check_release(wndw, asyw, harm);
  437. harm->wndw.mask &= ~BIT(wndw->id);
  438. } else {
  439. return 0;
  440. }
  441. /* Aside from the obvious case where the window is actively being
  442. * disabled, we might also need to temporarily disable the window
  443. * when performing certain modeset operations.
  444. */
  445. if (!asyw->visible || modeset) {
  446. asyw->clr.ntfy = armw->ntfy.handle != 0;
  447. asyw->clr.sema = armw->sema.handle != 0;
  448. asyw->clr.xlut = armw->xlut.handle != 0;
  449. if (asyw->clr.xlut && asyw->visible)
  450. asyw->set.xlut = asyw->xlut.handle != 0;
  451. asyw->clr.csc = armw->csc.valid;
  452. if (wndw->func->image_clr)
  453. asyw->clr.image = armw->image.handle[0] != 0;
  454. }
  455. return 0;
  456. }
  457. static void
  458. nv50_wndw_cleanup_fb(struct drm_plane *plane, struct drm_plane_state *old_state)
  459. {
  460. struct nouveau_drm *drm = nouveau_drm(plane->dev);
  461. struct nouveau_bo *nvbo;
  462. NV_ATOMIC(drm, "%s cleanup: %p\n", plane->name, old_state->fb);
  463. if (!old_state->fb)
  464. return;
  465. nvbo = nouveau_gem_object(old_state->fb->obj[0]);
  466. nouveau_bo_unpin(nvbo);
  467. }
  468. static int
  469. nv50_wndw_prepare_fb(struct drm_plane *plane, struct drm_plane_state *state)
  470. {
  471. struct drm_framebuffer *fb = state->fb;
  472. struct nouveau_drm *drm = nouveau_drm(plane->dev);
  473. struct nv50_wndw *wndw = nv50_wndw(plane);
  474. struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
  475. struct nouveau_bo *nvbo;
  476. struct nv50_head_atom *asyh;
  477. struct nv50_wndw_ctxdma *ctxdma;
  478. int ret;
  479. NV_ATOMIC(drm, "%s prepare: %p\n", plane->name, fb);
  480. if (!asyw->state.fb)
  481. return 0;
  482. nvbo = nouveau_gem_object(fb->obj[0]);
  483. ret = nouveau_bo_pin(nvbo, NOUVEAU_GEM_DOMAIN_VRAM, true);
  484. if (ret)
  485. return ret;
  486. if (wndw->ctxdma.parent) {
  487. ctxdma = nv50_wndw_ctxdma_new(wndw, fb);
  488. if (IS_ERR(ctxdma)) {
  489. nouveau_bo_unpin(nvbo);
  490. return PTR_ERR(ctxdma);
  491. }
  492. if (asyw->visible)
  493. asyw->image.handle[0] = ctxdma->object.handle;
  494. }
  495. ret = drm_gem_plane_helper_prepare_fb(plane, state);
  496. if (ret)
  497. return ret;
  498. asyw->image.offset[0] = nvbo->offset;
  499. if (wndw->func->prepare) {
  500. asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
  501. if (IS_ERR(asyh))
  502. return PTR_ERR(asyh);
  503. wndw->func->prepare(wndw, asyh, asyw);
  504. }
  505. return 0;
  506. }
  507. static const struct drm_plane_helper_funcs
  508. nv50_wndw_helper = {
  509. .prepare_fb = nv50_wndw_prepare_fb,
  510. .cleanup_fb = nv50_wndw_cleanup_fb,
  511. .atomic_check = nv50_wndw_atomic_check,
  512. };
  513. static void
  514. nv50_wndw_atomic_destroy_state(struct drm_plane *plane,
  515. struct drm_plane_state *state)
  516. {
  517. struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
  518. __drm_atomic_helper_plane_destroy_state(&asyw->state);
  519. kfree(asyw);
  520. }
  521. static struct drm_plane_state *
  522. nv50_wndw_atomic_duplicate_state(struct drm_plane *plane)
  523. {
  524. struct nv50_wndw_atom *armw = nv50_wndw_atom(plane->state);
  525. struct nv50_wndw_atom *asyw;
  526. if (!(asyw = kmalloc(sizeof(*asyw), GFP_KERNEL)))
  527. return NULL;
  528. __drm_atomic_helper_plane_duplicate_state(plane, &asyw->state);
  529. asyw->sema = armw->sema;
  530. asyw->ntfy = armw->ntfy;
  531. asyw->ilut = NULL;
  532. asyw->xlut = armw->xlut;
  533. asyw->csc = armw->csc;
  534. asyw->image = armw->image;
  535. asyw->point = armw->point;
  536. asyw->clr.mask = 0;
  537. asyw->set.mask = 0;
  538. return &asyw->state;
  539. }
  540. static int
  541. nv50_wndw_zpos_default(struct drm_plane *plane)
  542. {
  543. return (plane->type == DRM_PLANE_TYPE_PRIMARY) ? 0 :
  544. (plane->type == DRM_PLANE_TYPE_OVERLAY) ? 1 : 255;
  545. }
  546. static void
  547. nv50_wndw_reset(struct drm_plane *plane)
  548. {
  549. struct nv50_wndw_atom *asyw;
  550. if (WARN_ON(!(asyw = kzalloc(sizeof(*asyw), GFP_KERNEL))))
  551. return;
  552. if (plane->state)
  553. plane->funcs->atomic_destroy_state(plane, plane->state);
  554. __drm_atomic_helper_plane_reset(plane, &asyw->state);
  555. }
  556. static void
  557. nv50_wndw_destroy(struct drm_plane *plane)
  558. {
  559. struct nv50_wndw *wndw = nv50_wndw(plane);
  560. struct nv50_wndw_ctxdma *ctxdma, *ctxtmp;
  561. list_for_each_entry_safe(ctxdma, ctxtmp, &wndw->ctxdma.list, head) {
  562. nv50_wndw_ctxdma_del(ctxdma);
  563. }
  564. nv50_dmac_destroy(&wndw->wimm);
  565. nv50_dmac_destroy(&wndw->wndw);
  566. nv50_lut_fini(&wndw->ilut);
  567. drm_plane_cleanup(&wndw->plane);
  568. kfree(wndw);
  569. }
  570. /* This function assumes the format has already been validated against the plane
  571. * and the modifier was validated against the device-wides modifier list at FB
  572. * creation time.
  573. */
  574. static bool nv50_plane_format_mod_supported(struct drm_plane *plane,
  575. u32 format, u64 modifier)
  576. {
  577. struct nouveau_drm *drm = nouveau_drm(plane->dev);
  578. uint8_t i;
  579. if (drm->client.device.info.chipset < 0xc0) {
  580. const struct drm_format_info *info = drm_format_info(format);
  581. const uint8_t kind = (modifier >> 12) & 0xff;
  582. if (!format) return false;
  583. for (i = 0; i < info->num_planes; i++)
  584. if ((info->cpp[i] != 4) && kind != 0x70) return false;
  585. }
  586. return true;
  587. }
  588. const struct drm_plane_funcs
  589. nv50_wndw = {
  590. .update_plane = drm_atomic_helper_update_plane,
  591. .disable_plane = drm_atomic_helper_disable_plane,
  592. .destroy = nv50_wndw_destroy,
  593. .reset = nv50_wndw_reset,
  594. .atomic_duplicate_state = nv50_wndw_atomic_duplicate_state,
  595. .atomic_destroy_state = nv50_wndw_atomic_destroy_state,
  596. .format_mod_supported = nv50_plane_format_mod_supported,
  597. };
  598. static const u64 nv50_cursor_format_modifiers[] = {
  599. DRM_FORMAT_MOD_LINEAR,
  600. DRM_FORMAT_MOD_INVALID,
  601. };
  602. int
  603. nv50_wndw_new_(const struct nv50_wndw_func *func, struct drm_device *dev,
  604. enum drm_plane_type type, const char *name, int index,
  605. const u32 *format, u32 heads,
  606. enum nv50_disp_interlock_type interlock_type, u32 interlock_data,
  607. struct nv50_wndw **pwndw)
  608. {
  609. struct nouveau_drm *drm = nouveau_drm(dev);
  610. struct nvif_mmu *mmu = &drm->client.mmu;
  611. struct nv50_disp *disp = nv50_disp(dev);
  612. struct nv50_wndw *wndw;
  613. const u64 *format_modifiers;
  614. int nformat;
  615. int ret;
  616. if (!(wndw = *pwndw = kzalloc(sizeof(*wndw), GFP_KERNEL)))
  617. return -ENOMEM;
  618. wndw->func = func;
  619. wndw->id = index;
  620. wndw->interlock.type = interlock_type;
  621. wndw->interlock.data = interlock_data;
  622. wndw->ctxdma.parent = &wndw->wndw.base.user;
  623. INIT_LIST_HEAD(&wndw->ctxdma.list);
  624. for (nformat = 0; format[nformat]; nformat++);
  625. if (type == DRM_PLANE_TYPE_CURSOR)
  626. format_modifiers = nv50_cursor_format_modifiers;
  627. else
  628. format_modifiers = nouveau_display(dev)->format_modifiers;
  629. ret = drm_universal_plane_init(dev, &wndw->plane, heads, &nv50_wndw, format, nformat,
  630. format_modifiers, type, "%s-%d", name, index);
  631. if (ret) {
  632. kfree(*pwndw);
  633. *pwndw = NULL;
  634. return ret;
  635. }
  636. drm_plane_helper_add(&wndw->plane, &nv50_wndw_helper);
  637. if (wndw->func->ilut) {
  638. ret = nv50_lut_init(disp, mmu, &wndw->ilut);
  639. if (ret)
  640. return ret;
  641. }
  642. if (wndw->func->blend_set) {
  643. ret = drm_plane_create_zpos_property(&wndw->plane,
  644. nv50_wndw_zpos_default(&wndw->plane), 0, 254);
  645. if (ret)
  646. return ret;
  647. ret = drm_plane_create_alpha_property(&wndw->plane);
  648. if (ret)
  649. return ret;
  650. ret = drm_plane_create_blend_mode_property(&wndw->plane,
  651. BIT(DRM_MODE_BLEND_PIXEL_NONE) |
  652. BIT(DRM_MODE_BLEND_PREMULTI) |
  653. BIT(DRM_MODE_BLEND_COVERAGE));
  654. if (ret)
  655. return ret;
  656. } else {
  657. ret = drm_plane_create_zpos_immutable_property(&wndw->plane,
  658. nv50_wndw_zpos_default(&wndw->plane));
  659. if (ret)
  660. return ret;
  661. }
  662. return 0;
  663. }
  664. int
  665. nv50_wndw_new(struct nouveau_drm *drm, enum drm_plane_type type, int index,
  666. struct nv50_wndw **pwndw)
  667. {
  668. struct {
  669. s32 oclass;
  670. int version;
  671. int (*new)(struct nouveau_drm *, enum drm_plane_type,
  672. int, s32, struct nv50_wndw **);
  673. } wndws[] = {
  674. { GA102_DISP_WINDOW_CHANNEL_DMA, 0, wndwc67e_new },
  675. { TU102_DISP_WINDOW_CHANNEL_DMA, 0, wndwc57e_new },
  676. { GV100_DISP_WINDOW_CHANNEL_DMA, 0, wndwc37e_new },
  677. {}
  678. };
  679. struct nv50_disp *disp = nv50_disp(drm->dev);
  680. int cid, ret;
  681. cid = nvif_mclass(&disp->disp->object, wndws);
  682. if (cid < 0) {
  683. NV_ERROR(drm, "No supported window class\n");
  684. return cid;
  685. }
  686. ret = wndws[cid].new(drm, type, index, wndws[cid].oclass, pwndw);
  687. if (ret)
  688. return ret;
  689. return nv50_wimm_init(drm, *pwndw);
  690. }